diff --git a/README.md b/README.md index beab0ecbb10af31838126e315e9776fdbe5b7595..a2b7fac6d4fef8f82e38d76642f5bec8b61594d1 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,27 @@ --- license: llama2 +tags: +- vision-language model +- llama +- video understanding --- + +# Flash-VStream Model Card + + + +## Model details +We propose Flash-VStream, a video-language model that simulates the memory mechanism of humans. Our model can process extremely long video streams in real time and respond to user queries simultaneously. + +## License +Llama 2 is licensed under the LLAMA 2 Community License, +Copyright (c) Meta Platforms, Inc. All Rights Reserved. + +## Training data +This model is trained on image data from the LLaVA-1.5 dataset and video data from the WebVid and ActivityNet datasets, following LLaMA-VID, including: +- 558K filtered image-text pairs from LAION/CC/SBU, captioned by BLIP. +- 158K GPT-generated multimodal instruction-following data. +- 450K academic-task-oriented VQA data mixture. +- 40K ShareGPT data. +- 232K video-caption pairs sampled from the WebVid 2.5M dataset. +- 98K videos from ActivityNet with QA pairs from Video-ChatGPT. diff --git a/config.json b/config.json new file mode 100644 index 0000000000000000000000000000000000000000..303934819295ca4b9a0cf01585b492490244f322 --- /dev/null +++ b/config.json @@ -0,0 +1,56 @@ +{ + "_name_or_path": "./checkpoints-pretrain/vstream-vicuna-7b-pretrain-weighted_kmeans1*8-25*4-25*1/checkpoint-3000", + "architectures": [ + "VStreamLlamaForCausalLM" + ], + "bos_token_id": 1, + "compress_Turing_memory_size": 1, + "compress_Turing_update_ratio": 0.2, + "compress_long_memory_size": 4, + "compress_size": 8, + "compress_type": "mean", + "eos_token_id": 2, + "freeze_mm_mlp_adapter": false, + "freeze_mm_vision_resampler": false, + "hidden_act": "silu", + "hidden_size": 4096, + "image_aspect_ratio": "pad", + "initializer_range": 0.02, + "intermediate_size": 11008, + "max_position_embeddings": 4096, + "mm_hidden_size": 1024, + "mm_projector_lr": null, + "mm_projector_type": "mlp2x_gelu", + "mm_resampler_type": null, + "mm_use_4_vision_tokens": false, + "mm_use_im_patch_token": false, + "mm_use_im_start_end": false, + "mm_vision_select_feature": "patch", + "mm_vision_select_layer": -2, + "mm_vision_tower": "./ckpt/clip-vit-large-patch14", + "model_type": "vstream", + "num_attention_heads": 32, + "num_hidden_layers": 32, + "num_key_value_heads": 32, + "pad_token_id": 0, + "pretraining_tp": 1, + "rms_norm_eps": 1e-05, + "rope_scaling": null, + "tie_word_embeddings": false, + "tokenizer_model_max_length": 2048, + "tokenizer_padding_side": "right", + "torch_dtype": "bfloat16", + "transformers_version": "4.31.0", + "tune_mm_mlp_adapter": false, + "tune_mm_vision_resampler": false, + "unfreeze_mm_vision_tower": false, + "use_cache": false, + "use_mm_proj": true, + "video_Turing_memory_length": 25, + "video_current_memory_length": 1, + "video_long_memory_length": 25, + "video_max_frames": 26, + "video_sample_type": "weighted_kmeans", + "video_short_memory_length": 10, + "vocab_size": 32000 +} diff --git a/generation_config.json b/generation_config.json new file mode 100644 index 0000000000000000000000000000000000000000..57dbab308581e71f2adb7e9335aad7bc02cf3bb4 --- /dev/null +++ b/generation_config.json @@ -0,0 +1,9 @@ +{ + "bos_token_id": 1, + "eos_token_id": 2, + "max_length": 4096, + "pad_token_id": 0, + "temperature": 0.9, + "top_p": 0.6, + "transformers_version": "4.31.0" +} diff --git a/pytorch_model-00001-of-00002.bin 
b/pytorch_model-00001-of-00002.bin new file mode 100644 index 0000000000000000000000000000000000000000..fce6bea119b8656e661a4448840bf68e79fef6f3 --- /dev/null +++ b/pytorch_model-00001-of-00002.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:63302cc0909608c878e1d96dd914ad8850d5e0300e69895f2cbb2893ab966796 +size 9976631486 diff --git a/pytorch_model-00002-of-00002.bin b/pytorch_model-00002-of-00002.bin new file mode 100644 index 0000000000000000000000000000000000000000..d8199239a4aaf32af61b64595910c52b3c61ecfd --- /dev/null +++ b/pytorch_model-00002-of-00002.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f62296b51622a18041cd2f9f923927281894fe59b2cbbcbda533c7eba97a75aa +size 4149054736 diff --git a/pytorch_model.bin.index.json b/pytorch_model.bin.index.json new file mode 100644 index 0000000000000000000000000000000000000000..c97d119925c8c42af9fc51ac04a4c348854b01b9 --- /dev/null +++ b/pytorch_model.bin.index.json @@ -0,0 +1,735 @@ +{ + "metadata": { + "total_size": 14125422784 + }, + "weight_map": { + "lm_head.weight": "pytorch_model-00002-of-00002.bin", + "model.attention_model.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.attention_model.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.attention_model.out_ln.bias": "pytorch_model-00002-of-00002.bin", + "model.attention_model.out_ln.weight": "pytorch_model-00002-of-00002.bin", + "model.attention_model.out_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.attention_model.out_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.attention_model.q_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.attention_model.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.attention_model.v_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.attention_model.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.embed_tokens.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.0.input_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.0.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.0.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.0.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.0.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.0.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.0.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.0.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.0.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin", + "model.layers.0.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.1.input_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.1.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.1.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.1.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.1.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.1.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.1.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.1.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.1.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin", + "model.layers.1.self_attn.v_proj.weight": 
"pytorch_model-00001-of-00002.bin", + "model.layers.10.input_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.10.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.10.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.10.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.10.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.10.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.10.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.10.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.10.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin", + "model.layers.10.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.11.input_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.11.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.11.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.11.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.11.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.11.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.11.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.11.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.11.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin", + "model.layers.11.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.12.input_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.12.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.12.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.12.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.12.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.12.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.12.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.12.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.12.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin", + "model.layers.12.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.13.input_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.13.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.13.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.13.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.13.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.13.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.13.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.13.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.13.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin", + "model.layers.13.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.14.input_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.14.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.14.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin", + 
"model.layers.14.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.14.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.14.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.14.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.14.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.14.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin", + "model.layers.14.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.15.input_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.15.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.15.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.15.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.15.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.15.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.15.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.15.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.15.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin", + "model.layers.15.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.16.input_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.16.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.16.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.16.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.16.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.16.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.16.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.16.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.16.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin", + "model.layers.16.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.17.input_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.17.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.17.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.17.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.17.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.17.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.17.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.17.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.17.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin", + "model.layers.17.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.18.input_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.18.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.18.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.18.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.18.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.18.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.18.self_attn.o_proj.weight": 
"pytorch_model-00001-of-00002.bin", + "model.layers.18.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.18.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin", + "model.layers.18.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.19.input_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.19.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.19.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.19.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.19.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.19.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.19.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.19.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.19.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin", + "model.layers.19.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.2.input_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.2.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.2.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.2.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.2.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.2.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.2.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.2.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.2.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin", + "model.layers.2.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.20.input_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.20.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.20.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.20.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.20.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.20.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.20.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.20.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.20.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin", + "model.layers.20.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.21.input_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.21.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.21.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.21.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.21.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.21.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.21.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.21.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.21.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin", + "model.layers.21.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin", + 
"model.layers.22.input_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.22.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.22.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.22.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.22.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.22.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.22.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.22.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.22.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin", + "model.layers.22.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.23.input_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.23.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.23.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.23.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.23.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.23.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.23.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.23.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.23.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin", + "model.layers.23.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.24.input_layernorm.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.24.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.24.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.24.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.24.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.24.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.24.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.24.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.24.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin", + "model.layers.24.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.25.input_layernorm.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.25.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.25.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.25.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.25.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.25.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.25.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.25.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.25.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin", + "model.layers.25.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.26.input_layernorm.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.26.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.26.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.26.mlp.up_proj.weight": 
"pytorch_model-00002-of-00002.bin", + "model.layers.26.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.26.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.26.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.26.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.26.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin", + "model.layers.26.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.27.input_layernorm.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.27.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.27.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.27.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.27.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.27.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.27.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.27.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.27.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin", + "model.layers.27.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.28.input_layernorm.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.28.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.28.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.28.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.28.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.28.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.28.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.28.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.28.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin", + "model.layers.28.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.29.input_layernorm.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.29.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.29.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.29.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.29.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.29.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.29.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.29.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.29.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin", + "model.layers.29.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.3.input_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.3.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.3.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.3.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.3.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.3.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.3.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin", + 
"model.layers.3.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.3.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin", + "model.layers.3.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.30.input_layernorm.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.30.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.30.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.30.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.30.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.30.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.30.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.30.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.30.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin", + "model.layers.30.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.31.input_layernorm.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.31.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.31.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.31.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.31.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.31.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.31.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.31.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.31.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin", + "model.layers.31.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.layers.4.input_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.4.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.4.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.4.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.4.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.4.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.4.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.4.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.4.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin", + "model.layers.4.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.5.input_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.5.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.5.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.5.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.5.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.5.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.5.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.5.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.5.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin", + "model.layers.5.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.6.input_layernorm.weight": "pytorch_model-00001-of-00002.bin", + 
"model.layers.6.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.6.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.6.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.6.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.6.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.6.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.6.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.6.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin", + "model.layers.6.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.7.input_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.7.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.7.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.7.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.7.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.7.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.7.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.7.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.7.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin", + "model.layers.7.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.8.input_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.8.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.8.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.8.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.8.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.8.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.8.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.8.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.8.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin", + "model.layers.8.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.9.input_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.9.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.9.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.9.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.9.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.9.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.9.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.9.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.layers.9.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin", + "model.layers.9.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin", + "model.mm_projector.0.bias": "pytorch_model-00002-of-00002.bin", + "model.mm_projector.0.weight": "pytorch_model-00002-of-00002.bin", + "model.mm_projector.2.bias": "pytorch_model-00002-of-00002.bin", + "model.mm_projector.2.weight": "pytorch_model-00002-of-00002.bin", + "model.norm.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.embeddings.class_embedding": 
"pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.embeddings.patch_embedding.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.embeddings.position_embedding.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.bias": 
"pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc2.bias": "pytorch_model-00002-of-00002.bin", + 
"model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm2.bias": "pytorch_model-00002-of-00002.bin", + 
"model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.bias": "pytorch_model-00002-of-00002.bin", + 
"model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.weight": 
"pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc2.weight": "pytorch_model-00002-of-00002.bin", + 
"model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm2.weight": "pytorch_model-00002-of-00002.bin", + 
"model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm1.bias": 
"pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.bias": "pytorch_model-00002-of-00002.bin", + 
"model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.weight": 
"pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc1.weight": "pytorch_model-00002-of-00002.bin", + 
"model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm2.bias": 
"pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.bias": "pytorch_model-00002-of-00002.bin", + 
"model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc1.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc1.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc2.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc2.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.post_layernorm.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.post_layernorm.weight": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.pre_layrnorm.bias": "pytorch_model-00002-of-00002.bin", + "model.vision_tower.vision_tower.vision_model.pre_layrnorm.weight": "pytorch_model-00002-of-00002.bin" + } +} diff --git a/special_tokens_map.json b/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..14761dcf1466dc232bd41de9c21d4c617b15755e --- /dev/null +++ b/special_tokens_map.json @@ -0,0 +1,24 @@ +{ + "bos_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "eos_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "pad_token": "", + "unk_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + } +} diff --git a/tokenizer.model b/tokenizer.model new file mode 100644 index 0000000000000000000000000000000000000000..6c00c742ce03c627d6cd5b795984876fa49fa899 --- /dev/null +++ b/tokenizer.model @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347 +size 499723 diff --git a/tokenizer_config.json b/tokenizer_config.json new 
file mode 100644 index 0000000000000000000000000000000000000000..740756b4bef305e27d0bb4d2e1a40dd8847797f7 --- /dev/null +++ b/tokenizer_config.json @@ -0,0 +1,35 @@ +{ + "add_bos_token": true, + "add_eos_token": false, + "bos_token": { + "__type": "AddedToken", + "content": "<s>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "clean_up_tokenization_spaces": false, + "eos_token": { + "__type": "AddedToken", + "content": "</s>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "legacy": false, + "model_max_length": 2048, + "pad_token": null, + "padding_side": "right", + "sp_model_kwargs": {}, + "tokenizer_class": "LlamaTokenizer", + "unk_token": { + "__type": "AddedToken", + "content": "<unk>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + } +} diff --git a/trainer_state.json b/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..7d30923ab9956955ee7cab870ec3da7b52edd1df --- /dev/null +++ b/trainer_state.json @@ -0,0 +1,35416 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 0.989103101424979, + "global_step": 5900, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0, + "learning_rate": 1.11731843575419e-07, + "loss": 1.4195, + "step": 1 + }, + { + "epoch": 0.0, + "learning_rate": 2.23463687150838e-07, + "loss": 1.3823, + "step": 2 + }, + { + "epoch": 0.0, + "learning_rate": 3.3519553072625703e-07, + "loss": 1.4459, + "step": 3 + }, + { + "epoch": 0.0, + "learning_rate": 4.46927374301676e-07, + "loss": 1.5393, + "step": 4 + }, + { + "epoch": 0.0, + "learning_rate": 5.58659217877095e-07, + "loss": 1.4448, + "step": 5 + }, + { + "epoch": 0.0, + "learning_rate": 6.703910614525141e-07, + "loss": 1.4223, + "step": 6 + }, + { + "epoch": 0.0, + "learning_rate": 7.82122905027933e-07, + "loss": 1.4144, + "step": 7 + }, + { + "epoch": 0.0, + "learning_rate": 8.93854748603352e-07, + "loss": 1.3139, + "step": 8 + }, + { + "epoch": 0.0, + "learning_rate": 1.005586592178771e-06, + "loss": 1.4296, + "step": 9 + }, + { + "epoch": 0.0, + "learning_rate": 1.11731843575419e-06, + "loss": 1.4222, + "step": 10 + }, + { + "epoch": 0.0, + "learning_rate": 1.229050279329609e-06, + "loss": 1.3935, + "step": 11 + }, + { + "epoch": 0.0, + "learning_rate": 1.3407821229050281e-06, + "loss": 1.3394, + "step": 12 + }, + { + "epoch": 0.0, + "learning_rate": 1.4525139664804472e-06, + "loss": 1.4121, + "step": 13 + }, + { + "epoch": 0.0, + "learning_rate": 1.564245810055866e-06, + "loss": 1.3469, + "step": 14 + }, + { + "epoch": 0.0, + "learning_rate": 1.675977653631285e-06, + "loss": 1.4301, + "step": 15 + }, + { + "epoch": 0.0, + "learning_rate": 1.787709497206704e-06, + "loss": 1.3138, + "step": 16 + }, + { + "epoch": 0.0, + "learning_rate": 1.899441340782123e-06, + "loss": 1.2787, + "step": 17 + }, + { + "epoch": 0.0, + "learning_rate": 2.011173184357542e-06, + "loss": 1.2886, + "step": 18 + }, + { + "epoch": 0.0, + "learning_rate": 2.1229050279329612e-06, + "loss": 1.2743, + "step": 19 + }, + { + "epoch": 0.0, + "learning_rate": 2.23463687150838e-06, + "loss": 1.2688, + "step": 20 + }, + { + "epoch": 0.0, + "learning_rate": 2.3463687150837993e-06, + "loss": 1.2021, + "step": 21 + }, + { + "epoch": 0.0, + "learning_rate": 2.458100558659218e-06, + "loss": 1.2191, + "step": 22 + }, + { + "epoch": 0.0, + "learning_rate": 2.569832402234637e-06, + "loss": 1.1803, + "step":
23 + }, + { + "epoch": 0.0, + "learning_rate": 2.6815642458100562e-06, + "loss": 1.2004, + "step": 24 + }, + { + "epoch": 0.0, + "learning_rate": 2.793296089385475e-06, + "loss": 1.1655, + "step": 25 + }, + { + "epoch": 0.0, + "learning_rate": 2.9050279329608943e-06, + "loss": 0.3161, + "step": 26 + }, + { + "epoch": 0.0, + "learning_rate": 3.016759776536313e-06, + "loss": 1.2154, + "step": 27 + }, + { + "epoch": 0.0, + "learning_rate": 3.128491620111732e-06, + "loss": 1.2058, + "step": 28 + }, + { + "epoch": 0.0, + "learning_rate": 3.240223463687151e-06, + "loss": 1.2751, + "step": 29 + }, + { + "epoch": 0.01, + "learning_rate": 3.35195530726257e-06, + "loss": 1.1332, + "step": 30 + }, + { + "epoch": 0.01, + "learning_rate": 3.4636871508379893e-06, + "loss": 1.1565, + "step": 31 + }, + { + "epoch": 0.01, + "learning_rate": 3.575418994413408e-06, + "loss": 1.1614, + "step": 32 + }, + { + "epoch": 0.01, + "learning_rate": 3.687150837988827e-06, + "loss": 1.1344, + "step": 33 + }, + { + "epoch": 0.01, + "learning_rate": 3.798882681564246e-06, + "loss": 0.3087, + "step": 34 + }, + { + "epoch": 0.01, + "learning_rate": 3.910614525139665e-06, + "loss": 1.0746, + "step": 35 + }, + { + "epoch": 0.01, + "learning_rate": 4.022346368715084e-06, + "loss": 1.1482, + "step": 36 + }, + { + "epoch": 0.01, + "learning_rate": 4.134078212290504e-06, + "loss": 0.3071, + "step": 37 + }, + { + "epoch": 0.01, + "learning_rate": 4.2458100558659224e-06, + "loss": 1.1249, + "step": 38 + }, + { + "epoch": 0.01, + "learning_rate": 4.357541899441341e-06, + "loss": 0.3115, + "step": 39 + }, + { + "epoch": 0.01, + "learning_rate": 4.46927374301676e-06, + "loss": 1.1591, + "step": 40 + }, + { + "epoch": 0.01, + "learning_rate": 4.581005586592179e-06, + "loss": 1.141, + "step": 41 + }, + { + "epoch": 0.01, + "learning_rate": 4.692737430167599e-06, + "loss": 1.1086, + "step": 42 + }, + { + "epoch": 0.01, + "learning_rate": 4.8044692737430175e-06, + "loss": 1.085, + "step": 43 + }, + { + "epoch": 0.01, + "learning_rate": 4.916201117318436e-06, + "loss": 1.1062, + "step": 44 + }, + { + "epoch": 0.01, + "learning_rate": 5.027932960893855e-06, + "loss": 1.1468, + "step": 45 + }, + { + "epoch": 0.01, + "learning_rate": 5.139664804469274e-06, + "loss": 1.0687, + "step": 46 + }, + { + "epoch": 0.01, + "learning_rate": 5.251396648044693e-06, + "loss": 1.0904, + "step": 47 + }, + { + "epoch": 0.01, + "learning_rate": 5.3631284916201125e-06, + "loss": 1.0541, + "step": 48 + }, + { + "epoch": 0.01, + "learning_rate": 5.474860335195531e-06, + "loss": 0.3017, + "step": 49 + }, + { + "epoch": 0.01, + "learning_rate": 5.58659217877095e-06, + "loss": 1.1023, + "step": 50 + }, + { + "epoch": 0.01, + "learning_rate": 5.698324022346369e-06, + "loss": 1.0348, + "step": 51 + }, + { + "epoch": 0.01, + "learning_rate": 5.810055865921789e-06, + "loss": 1.0711, + "step": 52 + }, + { + "epoch": 0.01, + "learning_rate": 5.9217877094972075e-06, + "loss": 1.0878, + "step": 53 + }, + { + "epoch": 0.01, + "learning_rate": 6.033519553072626e-06, + "loss": 1.0898, + "step": 54 + }, + { + "epoch": 0.01, + "learning_rate": 6.145251396648045e-06, + "loss": 1.0799, + "step": 55 + }, + { + "epoch": 0.01, + "learning_rate": 6.256983240223464e-06, + "loss": 1.0543, + "step": 56 + }, + { + "epoch": 0.01, + "learning_rate": 6.368715083798883e-06, + "loss": 1.1395, + "step": 57 + }, + { + "epoch": 0.01, + "learning_rate": 6.480446927374302e-06, + "loss": 1.0945, + "step": 58 + }, + { + "epoch": 0.01, + "learning_rate": 6.592178770949721e-06, + "loss": 1.0429, + 
"step": 59 + }, + { + "epoch": 0.01, + "learning_rate": 6.70391061452514e-06, + "loss": 1.1208, + "step": 60 + }, + { + "epoch": 0.01, + "learning_rate": 6.815642458100559e-06, + "loss": 1.085, + "step": 61 + }, + { + "epoch": 0.01, + "learning_rate": 6.927374301675979e-06, + "loss": 1.0802, + "step": 62 + }, + { + "epoch": 0.01, + "learning_rate": 7.0391061452513975e-06, + "loss": 1.0949, + "step": 63 + }, + { + "epoch": 0.01, + "learning_rate": 7.150837988826816e-06, + "loss": 1.1396, + "step": 64 + }, + { + "epoch": 0.01, + "learning_rate": 7.262569832402235e-06, + "loss": 1.1551, + "step": 65 + }, + { + "epoch": 0.01, + "learning_rate": 7.374301675977654e-06, + "loss": 1.0647, + "step": 66 + }, + { + "epoch": 0.01, + "learning_rate": 7.486033519553073e-06, + "loss": 1.0458, + "step": 67 + }, + { + "epoch": 0.01, + "learning_rate": 7.597765363128492e-06, + "loss": 1.0191, + "step": 68 + }, + { + "epoch": 0.01, + "learning_rate": 7.709497206703911e-06, + "loss": 0.2835, + "step": 69 + }, + { + "epoch": 0.01, + "learning_rate": 7.82122905027933e-06, + "loss": 1.0698, + "step": 70 + }, + { + "epoch": 0.01, + "learning_rate": 7.932960893854749e-06, + "loss": 1.083, + "step": 71 + }, + { + "epoch": 0.01, + "learning_rate": 8.044692737430168e-06, + "loss": 1.0165, + "step": 72 + }, + { + "epoch": 0.01, + "learning_rate": 8.156424581005588e-06, + "loss": 1.1093, + "step": 73 + }, + { + "epoch": 0.01, + "learning_rate": 8.268156424581007e-06, + "loss": 1.0901, + "step": 74 + }, + { + "epoch": 0.01, + "learning_rate": 8.379888268156426e-06, + "loss": 1.1368, + "step": 75 + }, + { + "epoch": 0.01, + "learning_rate": 8.491620111731845e-06, + "loss": 1.0725, + "step": 76 + }, + { + "epoch": 0.01, + "learning_rate": 8.603351955307264e-06, + "loss": 1.0732, + "step": 77 + }, + { + "epoch": 0.01, + "learning_rate": 8.715083798882683e-06, + "loss": 0.9819, + "step": 78 + }, + { + "epoch": 0.01, + "learning_rate": 8.826815642458101e-06, + "loss": 1.0615, + "step": 79 + }, + { + "epoch": 0.01, + "learning_rate": 8.93854748603352e-06, + "loss": 1.0932, + "step": 80 + }, + { + "epoch": 0.01, + "learning_rate": 9.050279329608939e-06, + "loss": 1.0645, + "step": 81 + }, + { + "epoch": 0.01, + "learning_rate": 9.162011173184358e-06, + "loss": 1.0893, + "step": 82 + }, + { + "epoch": 0.01, + "learning_rate": 9.273743016759777e-06, + "loss": 1.0977, + "step": 83 + }, + { + "epoch": 0.01, + "learning_rate": 9.385474860335197e-06, + "loss": 1.0095, + "step": 84 + }, + { + "epoch": 0.01, + "learning_rate": 9.497206703910616e-06, + "loss": 1.0536, + "step": 85 + }, + { + "epoch": 0.01, + "learning_rate": 9.608938547486035e-06, + "loss": 0.3001, + "step": 86 + }, + { + "epoch": 0.01, + "learning_rate": 9.720670391061454e-06, + "loss": 1.1213, + "step": 87 + }, + { + "epoch": 0.01, + "learning_rate": 9.832402234636873e-06, + "loss": 1.0857, + "step": 88 + }, + { + "epoch": 0.01, + "learning_rate": 9.944134078212291e-06, + "loss": 1.1084, + "step": 89 + }, + { + "epoch": 0.02, + "learning_rate": 1.005586592178771e-05, + "loss": 1.0932, + "step": 90 + }, + { + "epoch": 0.02, + "learning_rate": 1.0167597765363129e-05, + "loss": 1.1151, + "step": 91 + }, + { + "epoch": 0.02, + "learning_rate": 1.0279329608938548e-05, + "loss": 1.1025, + "step": 92 + }, + { + "epoch": 0.02, + "learning_rate": 1.0391061452513967e-05, + "loss": 1.0693, + "step": 93 + }, + { + "epoch": 0.02, + "learning_rate": 1.0502793296089386e-05, + "loss": 1.0803, + "step": 94 + }, + { + "epoch": 0.02, + "learning_rate": 1.0614525139664806e-05, + "loss": 
1.0208, + "step": 95 + }, + { + "epoch": 0.02, + "learning_rate": 1.0726256983240225e-05, + "loss": 1.0734, + "step": 96 + }, + { + "epoch": 0.02, + "learning_rate": 1.0837988826815644e-05, + "loss": 1.0538, + "step": 97 + }, + { + "epoch": 0.02, + "learning_rate": 1.0949720670391063e-05, + "loss": 1.0481, + "step": 98 + }, + { + "epoch": 0.02, + "learning_rate": 1.1061452513966481e-05, + "loss": 1.0069, + "step": 99 + }, + { + "epoch": 0.02, + "learning_rate": 1.11731843575419e-05, + "loss": 1.0412, + "step": 100 + }, + { + "epoch": 0.02, + "learning_rate": 1.1284916201117319e-05, + "loss": 1.0039, + "step": 101 + }, + { + "epoch": 0.02, + "learning_rate": 1.1396648044692738e-05, + "loss": 1.0952, + "step": 102 + }, + { + "epoch": 0.02, + "learning_rate": 1.1508379888268157e-05, + "loss": 1.1374, + "step": 103 + }, + { + "epoch": 0.02, + "learning_rate": 1.1620111731843577e-05, + "loss": 1.0281, + "step": 104 + }, + { + "epoch": 0.02, + "learning_rate": 1.1731843575418994e-05, + "loss": 1.0593, + "step": 105 + }, + { + "epoch": 0.02, + "learning_rate": 1.1843575418994415e-05, + "loss": 1.0191, + "step": 106 + }, + { + "epoch": 0.02, + "learning_rate": 1.1955307262569834e-05, + "loss": 1.0218, + "step": 107 + }, + { + "epoch": 0.02, + "learning_rate": 1.2067039106145253e-05, + "loss": 1.0797, + "step": 108 + }, + { + "epoch": 0.02, + "learning_rate": 1.2178770949720671e-05, + "loss": 1.0986, + "step": 109 + }, + { + "epoch": 0.02, + "learning_rate": 1.229050279329609e-05, + "loss": 1.0304, + "step": 110 + }, + { + "epoch": 0.02, + "learning_rate": 1.2402234636871509e-05, + "loss": 1.0778, + "step": 111 + }, + { + "epoch": 0.02, + "learning_rate": 1.2513966480446928e-05, + "loss": 1.134, + "step": 112 + }, + { + "epoch": 0.02, + "learning_rate": 1.2625698324022347e-05, + "loss": 1.0307, + "step": 113 + }, + { + "epoch": 0.02, + "learning_rate": 1.2737430167597766e-05, + "loss": 0.266, + "step": 114 + }, + { + "epoch": 0.02, + "learning_rate": 1.2849162011173186e-05, + "loss": 1.0437, + "step": 115 + }, + { + "epoch": 0.02, + "learning_rate": 1.2960893854748603e-05, + "loss": 1.1123, + "step": 116 + }, + { + "epoch": 0.02, + "learning_rate": 1.3072625698324024e-05, + "loss": 1.0339, + "step": 117 + }, + { + "epoch": 0.02, + "learning_rate": 1.3184357541899443e-05, + "loss": 1.0634, + "step": 118 + }, + { + "epoch": 0.02, + "learning_rate": 1.3296089385474861e-05, + "loss": 1.003, + "step": 119 + }, + { + "epoch": 0.02, + "learning_rate": 1.340782122905028e-05, + "loss": 1.0444, + "step": 120 + }, + { + "epoch": 0.02, + "learning_rate": 1.3519553072625699e-05, + "loss": 0.9979, + "step": 121 + }, + { + "epoch": 0.02, + "learning_rate": 1.3631284916201118e-05, + "loss": 1.0716, + "step": 122 + }, + { + "epoch": 0.02, + "learning_rate": 1.3743016759776537e-05, + "loss": 1.0541, + "step": 123 + }, + { + "epoch": 0.02, + "learning_rate": 1.3854748603351957e-05, + "loss": 0.9872, + "step": 124 + }, + { + "epoch": 0.02, + "learning_rate": 1.3966480446927374e-05, + "loss": 1.0388, + "step": 125 + }, + { + "epoch": 0.02, + "learning_rate": 1.4078212290502795e-05, + "loss": 1.0212, + "step": 126 + }, + { + "epoch": 0.02, + "learning_rate": 1.4189944134078212e-05, + "loss": 0.985, + "step": 127 + }, + { + "epoch": 0.02, + "learning_rate": 1.4301675977653633e-05, + "loss": 0.2777, + "step": 128 + }, + { + "epoch": 0.02, + "learning_rate": 1.4413407821229052e-05, + "loss": 1.0523, + "step": 129 + }, + { + "epoch": 0.02, + "learning_rate": 1.452513966480447e-05, + "loss": 0.9581, + "step": 130 + }, + { + 
"epoch": 0.02, + "learning_rate": 1.463687150837989e-05, + "loss": 1.0563, + "step": 131 + }, + { + "epoch": 0.02, + "learning_rate": 1.4748603351955308e-05, + "loss": 1.07, + "step": 132 + }, + { + "epoch": 0.02, + "learning_rate": 1.4860335195530729e-05, + "loss": 1.0359, + "step": 133 + }, + { + "epoch": 0.02, + "learning_rate": 1.4972067039106146e-05, + "loss": 0.9764, + "step": 134 + }, + { + "epoch": 0.02, + "learning_rate": 1.5083798882681566e-05, + "loss": 1.016, + "step": 135 + }, + { + "epoch": 0.02, + "learning_rate": 1.5195530726256983e-05, + "loss": 1.0635, + "step": 136 + }, + { + "epoch": 0.02, + "learning_rate": 1.5307262569832404e-05, + "loss": 0.965, + "step": 137 + }, + { + "epoch": 0.02, + "learning_rate": 1.5418994413407823e-05, + "loss": 1.0639, + "step": 138 + }, + { + "epoch": 0.02, + "learning_rate": 1.553072625698324e-05, + "loss": 1.039, + "step": 139 + }, + { + "epoch": 0.02, + "learning_rate": 1.564245810055866e-05, + "loss": 0.2691, + "step": 140 + }, + { + "epoch": 0.02, + "learning_rate": 1.575418994413408e-05, + "loss": 1.0157, + "step": 141 + }, + { + "epoch": 0.02, + "learning_rate": 1.5865921787709498e-05, + "loss": 1.0212, + "step": 142 + }, + { + "epoch": 0.02, + "learning_rate": 1.5977653631284917e-05, + "loss": 1.0617, + "step": 143 + }, + { + "epoch": 0.02, + "learning_rate": 1.6089385474860336e-05, + "loss": 1.0756, + "step": 144 + }, + { + "epoch": 0.02, + "learning_rate": 1.6201117318435755e-05, + "loss": 1.0201, + "step": 145 + }, + { + "epoch": 0.02, + "learning_rate": 1.6312849162011177e-05, + "loss": 1.0184, + "step": 146 + }, + { + "epoch": 0.02, + "learning_rate": 1.6424581005586592e-05, + "loss": 1.054, + "step": 147 + }, + { + "epoch": 0.02, + "learning_rate": 1.6536312849162014e-05, + "loss": 1.1014, + "step": 148 + }, + { + "epoch": 0.02, + "learning_rate": 1.664804469273743e-05, + "loss": 1.0176, + "step": 149 + }, + { + "epoch": 0.03, + "learning_rate": 1.6759776536312852e-05, + "loss": 1.0576, + "step": 150 + }, + { + "epoch": 0.03, + "learning_rate": 1.687150837988827e-05, + "loss": 1.0996, + "step": 151 + }, + { + "epoch": 0.03, + "learning_rate": 1.698324022346369e-05, + "loss": 1.0468, + "step": 152 + }, + { + "epoch": 0.03, + "learning_rate": 1.709497206703911e-05, + "loss": 1.0631, + "step": 153 + }, + { + "epoch": 0.03, + "learning_rate": 1.7206703910614527e-05, + "loss": 1.0168, + "step": 154 + }, + { + "epoch": 0.03, + "learning_rate": 1.7318435754189946e-05, + "loss": 1.068, + "step": 155 + }, + { + "epoch": 0.03, + "learning_rate": 1.7430167597765365e-05, + "loss": 0.9837, + "step": 156 + }, + { + "epoch": 0.03, + "learning_rate": 1.7541899441340784e-05, + "loss": 0.3001, + "step": 157 + }, + { + "epoch": 0.03, + "learning_rate": 1.7653631284916203e-05, + "loss": 1.0957, + "step": 158 + }, + { + "epoch": 0.03, + "learning_rate": 1.776536312849162e-05, + "loss": 1.0985, + "step": 159 + }, + { + "epoch": 0.03, + "learning_rate": 1.787709497206704e-05, + "loss": 1.0373, + "step": 160 + }, + { + "epoch": 0.03, + "learning_rate": 1.798882681564246e-05, + "loss": 1.0332, + "step": 161 + }, + { + "epoch": 0.03, + "learning_rate": 1.8100558659217878e-05, + "loss": 1.045, + "step": 162 + }, + { + "epoch": 0.03, + "learning_rate": 1.8212290502793297e-05, + "loss": 1.0648, + "step": 163 + }, + { + "epoch": 0.03, + "learning_rate": 1.8324022346368716e-05, + "loss": 1.0253, + "step": 164 + }, + { + "epoch": 0.03, + "learning_rate": 1.8435754189944135e-05, + "loss": 0.9576, + "step": 165 + }, + { + "epoch": 0.03, + "learning_rate": 
1.8547486033519553e-05, + "loss": 1.0447, + "step": 166 + }, + { + "epoch": 0.03, + "learning_rate": 1.8659217877094972e-05, + "loss": 0.9998, + "step": 167 + }, + { + "epoch": 0.03, + "learning_rate": 1.8770949720670394e-05, + "loss": 1.1409, + "step": 168 + }, + { + "epoch": 0.03, + "learning_rate": 1.888268156424581e-05, + "loss": 1.0662, + "step": 169 + }, + { + "epoch": 0.03, + "learning_rate": 1.8994413407821232e-05, + "loss": 1.0401, + "step": 170 + }, + { + "epoch": 0.03, + "learning_rate": 1.910614525139665e-05, + "loss": 1.0438, + "step": 171 + }, + { + "epoch": 0.03, + "learning_rate": 1.921787709497207e-05, + "loss": 1.0161, + "step": 172 + }, + { + "epoch": 0.03, + "learning_rate": 1.932960893854749e-05, + "loss": 1.1072, + "step": 173 + }, + { + "epoch": 0.03, + "learning_rate": 1.9441340782122907e-05, + "loss": 0.9698, + "step": 174 + }, + { + "epoch": 0.03, + "learning_rate": 1.9553072625698326e-05, + "loss": 1.027, + "step": 175 + }, + { + "epoch": 0.03, + "learning_rate": 1.9664804469273745e-05, + "loss": 1.0583, + "step": 176 + }, + { + "epoch": 0.03, + "learning_rate": 1.9776536312849164e-05, + "loss": 0.994, + "step": 177 + }, + { + "epoch": 0.03, + "learning_rate": 1.9888268156424583e-05, + "loss": 1.0902, + "step": 178 + }, + { + "epoch": 0.03, + "learning_rate": 2e-05, + "loss": 1.0203, + "step": 179 + }, + { + "epoch": 0.03, + "learning_rate": 1.9999998525947744e-05, + "loss": 1.0104, + "step": 180 + }, + { + "epoch": 0.03, + "learning_rate": 1.9999994103791404e-05, + "loss": 1.0164, + "step": 181 + }, + { + "epoch": 0.03, + "learning_rate": 1.9999986733532287e-05, + "loss": 0.9776, + "step": 182 + }, + { + "epoch": 0.03, + "learning_rate": 1.999997641517256e-05, + "loss": 1.0917, + "step": 183 + }, + { + "epoch": 0.03, + "learning_rate": 1.9999963148715276e-05, + "loss": 1.0055, + "step": 184 + }, + { + "epoch": 0.03, + "learning_rate": 1.9999946934164337e-05, + "loss": 1.0454, + "step": 185 + }, + { + "epoch": 0.03, + "learning_rate": 1.9999927771524526e-05, + "loss": 1.0124, + "step": 186 + }, + { + "epoch": 0.03, + "learning_rate": 1.999990566080149e-05, + "loss": 1.0107, + "step": 187 + }, + { + "epoch": 0.03, + "learning_rate": 1.9999880602001752e-05, + "loss": 1.0456, + "step": 188 + }, + { + "epoch": 0.03, + "learning_rate": 1.9999852595132698e-05, + "loss": 1.0576, + "step": 189 + }, + { + "epoch": 0.03, + "learning_rate": 1.9999821640202584e-05, + "loss": 1.0671, + "step": 190 + }, + { + "epoch": 0.03, + "learning_rate": 1.999978773722054e-05, + "loss": 0.966, + "step": 191 + }, + { + "epoch": 0.03, + "learning_rate": 1.999975088619655e-05, + "loss": 1.1166, + "step": 192 + }, + { + "epoch": 0.03, + "learning_rate": 1.9999711087141484e-05, + "loss": 1.028, + "step": 193 + }, + { + "epoch": 0.03, + "learning_rate": 1.9999668340067083e-05, + "loss": 1.0081, + "step": 194 + }, + { + "epoch": 0.03, + "learning_rate": 1.9999622644985937e-05, + "loss": 1.0438, + "step": 195 + }, + { + "epoch": 0.03, + "learning_rate": 1.9999574001911526e-05, + "loss": 1.0737, + "step": 196 + }, + { + "epoch": 0.03, + "learning_rate": 1.9999522410858187e-05, + "loss": 1.0315, + "step": 197 + }, + { + "epoch": 0.03, + "learning_rate": 1.9999467871841126e-05, + "loss": 1.0126, + "step": 198 + }, + { + "epoch": 0.03, + "learning_rate": 1.999941038487643e-05, + "loss": 1.0461, + "step": 199 + }, + { + "epoch": 0.03, + "learning_rate": 1.999934994998104e-05, + "loss": 0.9935, + "step": 200 + }, + { + "epoch": 0.03, + "learning_rate": 1.999928656717278e-05, + "loss": 1.043, + "step": 
201 + }, + { + "epoch": 0.03, + "learning_rate": 1.9999220236470326e-05, + "loss": 0.9858, + "step": 202 + }, + { + "epoch": 0.03, + "learning_rate": 1.999915095789324e-05, + "loss": 1.0101, + "step": 203 + }, + { + "epoch": 0.03, + "learning_rate": 1.9999078731461942e-05, + "loss": 1.0375, + "step": 204 + }, + { + "epoch": 0.03, + "learning_rate": 1.999900355719773e-05, + "loss": 1.041, + "step": 205 + }, + { + "epoch": 0.03, + "learning_rate": 1.9998925435122757e-05, + "loss": 1.0311, + "step": 206 + }, + { + "epoch": 0.03, + "learning_rate": 1.9998844365260068e-05, + "loss": 1.0565, + "step": 207 + }, + { + "epoch": 0.03, + "learning_rate": 1.9998760347633556e-05, + "loss": 0.289, + "step": 208 + }, + { + "epoch": 0.04, + "learning_rate": 1.9998673382267983e-05, + "loss": 1.0967, + "step": 209 + }, + { + "epoch": 0.04, + "learning_rate": 1.9998583469189e-05, + "loss": 1.0046, + "step": 210 + }, + { + "epoch": 0.04, + "learning_rate": 1.9998490608423106e-05, + "loss": 1.005, + "step": 211 + }, + { + "epoch": 0.04, + "learning_rate": 1.9998394799997684e-05, + "loss": 0.3057, + "step": 212 + }, + { + "epoch": 0.04, + "learning_rate": 1.9998296043940973e-05, + "loss": 0.9926, + "step": 213 + }, + { + "epoch": 0.04, + "learning_rate": 1.9998194340282088e-05, + "loss": 1.0529, + "step": 214 + }, + { + "epoch": 0.04, + "learning_rate": 1.9998089689051014e-05, + "loss": 1.0194, + "step": 215 + }, + { + "epoch": 0.04, + "learning_rate": 1.9997982090278606e-05, + "loss": 0.2541, + "step": 216 + }, + { + "epoch": 0.04, + "learning_rate": 1.999787154399658e-05, + "loss": 1.0362, + "step": 217 + }, + { + "epoch": 0.04, + "learning_rate": 1.9997758050237533e-05, + "loss": 1.1033, + "step": 218 + }, + { + "epoch": 0.04, + "learning_rate": 1.9997641609034916e-05, + "loss": 0.9754, + "step": 219 + }, + { + "epoch": 0.04, + "learning_rate": 1.9997522220423062e-05, + "loss": 1.0493, + "step": 220 + }, + { + "epoch": 0.04, + "learning_rate": 1.9997399884437167e-05, + "loss": 1.0513, + "step": 221 + }, + { + "epoch": 0.04, + "learning_rate": 1.9997274601113294e-05, + "loss": 1.0015, + "step": 222 + }, + { + "epoch": 0.04, + "learning_rate": 1.9997146370488384e-05, + "loss": 0.283, + "step": 223 + }, + { + "epoch": 0.04, + "learning_rate": 1.9997015192600235e-05, + "loss": 0.9815, + "step": 224 + }, + { + "epoch": 0.04, + "learning_rate": 1.9996881067487526e-05, + "loss": 1.0113, + "step": 225 + }, + { + "epoch": 0.04, + "learning_rate": 1.9996743995189793e-05, + "loss": 0.281, + "step": 226 + }, + { + "epoch": 0.04, + "learning_rate": 1.9996603975747443e-05, + "loss": 1.078, + "step": 227 + }, + { + "epoch": 0.04, + "learning_rate": 1.9996461009201767e-05, + "loss": 1.016, + "step": 228 + }, + { + "epoch": 0.04, + "learning_rate": 1.9996315095594903e-05, + "loss": 1.0403, + "step": 229 + }, + { + "epoch": 0.04, + "learning_rate": 1.999616623496987e-05, + "loss": 1.0036, + "step": 230 + }, + { + "epoch": 0.04, + "learning_rate": 1.9996014427370556e-05, + "loss": 1.035, + "step": 231 + }, + { + "epoch": 0.04, + "learning_rate": 1.9995859672841713e-05, + "loss": 1.0055, + "step": 232 + }, + { + "epoch": 0.04, + "learning_rate": 1.9995701971428967e-05, + "loss": 0.9931, + "step": 233 + }, + { + "epoch": 0.04, + "learning_rate": 1.9995541323178808e-05, + "loss": 0.964, + "step": 234 + }, + { + "epoch": 0.04, + "learning_rate": 1.9995377728138597e-05, + "loss": 0.9457, + "step": 235 + }, + { + "epoch": 0.04, + "learning_rate": 1.9995211186356564e-05, + "loss": 0.983, + "step": 236 + }, + { + "epoch": 0.04, + 
"learning_rate": 1.9995041697881805e-05, + "loss": 1.0148, + "step": 237 + }, + { + "epoch": 0.04, + "learning_rate": 1.999486926276429e-05, + "loss": 1.1194, + "step": 238 + }, + { + "epoch": 0.04, + "learning_rate": 1.9994693881054853e-05, + "loss": 0.9882, + "step": 239 + }, + { + "epoch": 0.04, + "learning_rate": 1.99945155528052e-05, + "loss": 0.9949, + "step": 240 + }, + { + "epoch": 0.04, + "learning_rate": 1.9994334278067905e-05, + "loss": 1.0431, + "step": 241 + }, + { + "epoch": 0.04, + "learning_rate": 1.9994150056896405e-05, + "loss": 0.9903, + "step": 242 + }, + { + "epoch": 0.04, + "learning_rate": 1.9993962889345012e-05, + "loss": 1.0833, + "step": 243 + }, + { + "epoch": 0.04, + "learning_rate": 1.9993772775468907e-05, + "loss": 1.0114, + "step": 244 + }, + { + "epoch": 0.04, + "learning_rate": 1.9993579715324135e-05, + "loss": 1.0357, + "step": 245 + }, + { + "epoch": 0.04, + "learning_rate": 1.9993383708967618e-05, + "loss": 0.9422, + "step": 246 + }, + { + "epoch": 0.04, + "learning_rate": 1.9993184756457132e-05, + "loss": 1.0163, + "step": 247 + }, + { + "epoch": 0.04, + "learning_rate": 1.9992982857851334e-05, + "loss": 1.0553, + "step": 248 + }, + { + "epoch": 0.04, + "learning_rate": 1.9992778013209752e-05, + "loss": 0.9633, + "step": 249 + }, + { + "epoch": 0.04, + "learning_rate": 1.9992570222592768e-05, + "loss": 0.9816, + "step": 250 + }, + { + "epoch": 0.04, + "learning_rate": 1.999235948606164e-05, + "loss": 1.0291, + "step": 251 + }, + { + "epoch": 0.04, + "learning_rate": 1.9992145803678505e-05, + "loss": 0.9977, + "step": 252 + }, + { + "epoch": 0.04, + "learning_rate": 1.999192917550635e-05, + "loss": 1.0898, + "step": 253 + }, + { + "epoch": 0.04, + "learning_rate": 1.9991709601609042e-05, + "loss": 1.0618, + "step": 254 + }, + { + "epoch": 0.04, + "learning_rate": 1.9991487082051314e-05, + "loss": 1.0597, + "step": 255 + }, + { + "epoch": 0.04, + "learning_rate": 1.9991261616898767e-05, + "loss": 0.9776, + "step": 256 + }, + { + "epoch": 0.04, + "learning_rate": 1.9991033206217868e-05, + "loss": 1.0153, + "step": 257 + }, + { + "epoch": 0.04, + "learning_rate": 1.999080185007596e-05, + "loss": 1.0944, + "step": 258 + }, + { + "epoch": 0.04, + "learning_rate": 1.9990567548541245e-05, + "loss": 1.0728, + "step": 259 + }, + { + "epoch": 0.04, + "learning_rate": 1.9990330301682798e-05, + "loss": 1.0651, + "step": 260 + }, + { + "epoch": 0.04, + "learning_rate": 1.9990090109570565e-05, + "loss": 1.1126, + "step": 261 + }, + { + "epoch": 0.04, + "learning_rate": 1.9989846972275356e-05, + "loss": 1.1006, + "step": 262 + }, + { + "epoch": 0.04, + "learning_rate": 1.9989600889868846e-05, + "loss": 0.9786, + "step": 263 + }, + { + "epoch": 0.04, + "learning_rate": 1.9989351862423585e-05, + "loss": 0.997, + "step": 264 + }, + { + "epoch": 0.04, + "learning_rate": 1.9989099890012993e-05, + "loss": 1.0592, + "step": 265 + }, + { + "epoch": 0.04, + "learning_rate": 1.998884497271135e-05, + "loss": 1.0076, + "step": 266 + }, + { + "epoch": 0.04, + "learning_rate": 1.998858711059381e-05, + "loss": 1.0462, + "step": 267 + }, + { + "epoch": 0.04, + "learning_rate": 1.998832630373639e-05, + "loss": 1.0561, + "step": 268 + }, + { + "epoch": 0.05, + "learning_rate": 1.9988062552215983e-05, + "loss": 0.99, + "step": 269 + }, + { + "epoch": 0.05, + "learning_rate": 1.9987795856110347e-05, + "loss": 1.0551, + "step": 270 + }, + { + "epoch": 0.05, + "learning_rate": 1.99875262154981e-05, + "loss": 1.0713, + "step": 271 + }, + { + "epoch": 0.05, + "learning_rate": 
1.9987253630458738e-05, + "loss": 1.0204, + "step": 272 + }, + { + "epoch": 0.05, + "learning_rate": 1.9986978101072627e-05, + "loss": 1.0142, + "step": 273 + }, + { + "epoch": 0.05, + "learning_rate": 1.9986699627420987e-05, + "loss": 1.0011, + "step": 274 + }, + { + "epoch": 0.05, + "learning_rate": 1.998641820958592e-05, + "loss": 0.944, + "step": 275 + }, + { + "epoch": 0.05, + "learning_rate": 1.9986133847650392e-05, + "loss": 0.9923, + "step": 276 + }, + { + "epoch": 0.05, + "learning_rate": 1.9985846541698234e-05, + "loss": 1.0176, + "step": 277 + }, + { + "epoch": 0.05, + "learning_rate": 1.9985556291814147e-05, + "loss": 1.0848, + "step": 278 + }, + { + "epoch": 0.05, + "learning_rate": 1.99852630980837e-05, + "loss": 1.0405, + "step": 279 + }, + { + "epoch": 0.05, + "learning_rate": 1.998496696059333e-05, + "loss": 1.0541, + "step": 280 + }, + { + "epoch": 0.05, + "learning_rate": 1.9984667879430336e-05, + "loss": 1.0211, + "step": 281 + }, + { + "epoch": 0.05, + "learning_rate": 1.99843658546829e-05, + "loss": 1.0575, + "step": 282 + }, + { + "epoch": 0.05, + "learning_rate": 1.998406088644006e-05, + "loss": 0.3278, + "step": 283 + }, + { + "epoch": 0.05, + "learning_rate": 1.9983752974791715e-05, + "loss": 0.9662, + "step": 284 + }, + { + "epoch": 0.05, + "learning_rate": 1.9983442119828647e-05, + "loss": 1.0602, + "step": 285 + }, + { + "epoch": 0.05, + "learning_rate": 1.99831283216425e-05, + "loss": 1.019, + "step": 286 + }, + { + "epoch": 0.05, + "learning_rate": 1.9982811580325784e-05, + "loss": 0.3145, + "step": 287 + }, + { + "epoch": 0.05, + "learning_rate": 1.9982491895971878e-05, + "loss": 1.0347, + "step": 288 + }, + { + "epoch": 0.05, + "learning_rate": 1.9982169268675024e-05, + "loss": 1.0748, + "step": 289 + }, + { + "epoch": 0.05, + "learning_rate": 1.9981843698530345e-05, + "loss": 0.9802, + "step": 290 + }, + { + "epoch": 0.05, + "learning_rate": 1.9981515185633812e-05, + "loss": 1.0498, + "step": 291 + }, + { + "epoch": 0.05, + "learning_rate": 1.9981183730082283e-05, + "loss": 0.9916, + "step": 292 + }, + { + "epoch": 0.05, + "learning_rate": 1.9980849331973467e-05, + "loss": 1.0553, + "step": 293 + }, + { + "epoch": 0.05, + "learning_rate": 1.9980511991405955e-05, + "loss": 1.0027, + "step": 294 + }, + { + "epoch": 0.05, + "learning_rate": 1.9980171708479194e-05, + "loss": 0.9694, + "step": 295 + }, + { + "epoch": 0.05, + "learning_rate": 1.9979828483293504e-05, + "loss": 1.0036, + "step": 296 + }, + { + "epoch": 0.05, + "learning_rate": 1.997948231595007e-05, + "loss": 1.0311, + "step": 297 + }, + { + "epoch": 0.05, + "learning_rate": 1.9979133206550953e-05, + "loss": 0.3074, + "step": 298 + }, + { + "epoch": 0.05, + "learning_rate": 1.9978781155199062e-05, + "loss": 1.0511, + "step": 299 + }, + { + "epoch": 0.05, + "learning_rate": 1.9978426161998195e-05, + "loss": 1.0506, + "step": 300 + }, + { + "epoch": 0.05, + "learning_rate": 1.9978068227053002e-05, + "loss": 0.9957, + "step": 301 + }, + { + "epoch": 0.05, + "learning_rate": 1.9977707350469012e-05, + "loss": 1.0066, + "step": 302 + }, + { + "epoch": 0.05, + "learning_rate": 1.9977343532352608e-05, + "loss": 0.9582, + "step": 303 + }, + { + "epoch": 0.05, + "learning_rate": 1.9976976772811055e-05, + "loss": 1.023, + "step": 304 + }, + { + "epoch": 0.05, + "learning_rate": 1.997660707195247e-05, + "loss": 1.0091, + "step": 305 + }, + { + "epoch": 0.05, + "learning_rate": 1.997623442988585e-05, + "loss": 0.2919, + "step": 306 + }, + { + "epoch": 0.05, + "learning_rate": 1.9975858846721054e-05, + "loss": 
0.9865, + "step": 307 + }, + { + "epoch": 0.05, + "learning_rate": 1.9975480322568802e-05, + "loss": 0.9313, + "step": 308 + }, + { + "epoch": 0.05, + "learning_rate": 1.9975098857540695e-05, + "loss": 0.9569, + "step": 309 + }, + { + "epoch": 0.05, + "learning_rate": 1.9974714451749185e-05, + "loss": 1.0099, + "step": 310 + }, + { + "epoch": 0.05, + "learning_rate": 1.9974327105307605e-05, + "loss": 1.0968, + "step": 311 + }, + { + "epoch": 0.05, + "learning_rate": 1.9973936818330145e-05, + "loss": 1.0231, + "step": 312 + }, + { + "epoch": 0.05, + "learning_rate": 1.9973543590931867e-05, + "loss": 1.0604, + "step": 313 + }, + { + "epoch": 0.05, + "learning_rate": 1.9973147423228698e-05, + "loss": 1.043, + "step": 314 + }, + { + "epoch": 0.05, + "learning_rate": 1.9972748315337434e-05, + "loss": 0.3334, + "step": 315 + }, + { + "epoch": 0.05, + "learning_rate": 1.9972346267375736e-05, + "loss": 1.0126, + "step": 316 + }, + { + "epoch": 0.05, + "learning_rate": 1.997194127946213e-05, + "loss": 1.0066, + "step": 317 + }, + { + "epoch": 0.05, + "learning_rate": 1.9971533351716012e-05, + "loss": 1.0794, + "step": 318 + }, + { + "epoch": 0.05, + "learning_rate": 1.9971122484257643e-05, + "loss": 1.0017, + "step": 319 + }, + { + "epoch": 0.05, + "learning_rate": 1.9970708677208147e-05, + "loss": 0.945, + "step": 320 + }, + { + "epoch": 0.05, + "learning_rate": 1.9970291930689526e-05, + "loss": 1.0478, + "step": 321 + }, + { + "epoch": 0.05, + "learning_rate": 1.9969872244824638e-05, + "loss": 1.0193, + "step": 322 + }, + { + "epoch": 0.05, + "learning_rate": 1.9969449619737212e-05, + "loss": 1.0245, + "step": 323 + }, + { + "epoch": 0.05, + "learning_rate": 1.9969024055551843e-05, + "loss": 1.0132, + "step": 324 + }, + { + "epoch": 0.05, + "learning_rate": 1.9968595552393983e-05, + "loss": 0.9864, + "step": 325 + }, + { + "epoch": 0.05, + "learning_rate": 1.996816411038997e-05, + "loss": 1.0231, + "step": 326 + }, + { + "epoch": 0.05, + "learning_rate": 1.9967729729666993e-05, + "loss": 0.987, + "step": 327 + }, + { + "epoch": 0.05, + "learning_rate": 1.9967292410353117e-05, + "loss": 1.0116, + "step": 328 + }, + { + "epoch": 0.06, + "learning_rate": 1.996685215257726e-05, + "loss": 0.3385, + "step": 329 + }, + { + "epoch": 0.06, + "learning_rate": 1.9966408956469215e-05, + "loss": 1.0269, + "step": 330 + }, + { + "epoch": 0.06, + "learning_rate": 1.996596282215965e-05, + "loss": 1.0321, + "step": 331 + }, + { + "epoch": 0.06, + "learning_rate": 1.996551374978008e-05, + "loss": 1.0462, + "step": 332 + }, + { + "epoch": 0.06, + "learning_rate": 1.9965061739462903e-05, + "loss": 1.1047, + "step": 333 + }, + { + "epoch": 0.06, + "learning_rate": 1.9964606791341373e-05, + "loss": 0.9737, + "step": 334 + }, + { + "epoch": 0.06, + "learning_rate": 1.9964148905549617e-05, + "loss": 1.0513, + "step": 335 + }, + { + "epoch": 0.06, + "learning_rate": 1.996368808222262e-05, + "loss": 0.9591, + "step": 336 + }, + { + "epoch": 0.06, + "learning_rate": 1.996322432149624e-05, + "loss": 1.0149, + "step": 337 + }, + { + "epoch": 0.06, + "learning_rate": 1.9962757623507197e-05, + "loss": 0.9705, + "step": 338 + }, + { + "epoch": 0.06, + "learning_rate": 1.996228798839308e-05, + "loss": 0.9716, + "step": 339 + }, + { + "epoch": 0.06, + "learning_rate": 1.996181541629234e-05, + "loss": 1.0156, + "step": 340 + }, + { + "epoch": 0.06, + "learning_rate": 1.99613399073443e-05, + "loss": 1.0011, + "step": 341 + }, + { + "epoch": 0.06, + "learning_rate": 1.9960861461689146e-05, + "loss": 1.0068, + "step": 342 + }, + { + 
"epoch": 0.06, + "learning_rate": 1.996038007946792e-05, + "loss": 1.0131, + "step": 343 + }, + { + "epoch": 0.06, + "learning_rate": 1.995989576082255e-05, + "loss": 1.0618, + "step": 344 + }, + { + "epoch": 0.06, + "learning_rate": 1.9959408505895807e-05, + "loss": 1.0406, + "step": 345 + }, + { + "epoch": 0.06, + "learning_rate": 1.9958918314831347e-05, + "loss": 1.083, + "step": 346 + }, + { + "epoch": 0.06, + "learning_rate": 1.995842518777368e-05, + "loss": 1.0579, + "step": 347 + }, + { + "epoch": 0.06, + "learning_rate": 1.9957929124868184e-05, + "loss": 1.0104, + "step": 348 + }, + { + "epoch": 0.06, + "learning_rate": 1.995743012626111e-05, + "loss": 1.096, + "step": 349 + }, + { + "epoch": 0.06, + "learning_rate": 1.995692819209956e-05, + "loss": 1.0562, + "step": 350 + }, + { + "epoch": 0.06, + "learning_rate": 1.9956423322531515e-05, + "loss": 1.056, + "step": 351 + }, + { + "epoch": 0.06, + "learning_rate": 1.995591551770581e-05, + "loss": 1.0834, + "step": 352 + }, + { + "epoch": 0.06, + "learning_rate": 1.9955404777772157e-05, + "loss": 1.0424, + "step": 353 + }, + { + "epoch": 0.06, + "learning_rate": 1.9954891102881126e-05, + "loss": 0.3342, + "step": 354 + }, + { + "epoch": 0.06, + "learning_rate": 1.9954374493184153e-05, + "loss": 0.3423, + "step": 355 + }, + { + "epoch": 0.06, + "learning_rate": 1.9953854948833537e-05, + "loss": 1.0498, + "step": 356 + }, + { + "epoch": 0.06, + "learning_rate": 1.9953332469982453e-05, + "loss": 1.0183, + "step": 357 + }, + { + "epoch": 0.06, + "learning_rate": 1.9952807056784925e-05, + "loss": 1.0293, + "step": 358 + }, + { + "epoch": 0.06, + "learning_rate": 1.9952278709395855e-05, + "loss": 0.285, + "step": 359 + }, + { + "epoch": 0.06, + "learning_rate": 1.9951747427971e-05, + "loss": 1.0454, + "step": 360 + }, + { + "epoch": 0.06, + "learning_rate": 1.9951213212666993e-05, + "loss": 1.0458, + "step": 361 + }, + { + "epoch": 0.06, + "learning_rate": 1.9950676063641325e-05, + "loss": 1.0579, + "step": 362 + }, + { + "epoch": 0.06, + "learning_rate": 1.9950135981052353e-05, + "loss": 1.0489, + "step": 363 + }, + { + "epoch": 0.06, + "learning_rate": 1.99495929650593e-05, + "loss": 1.0549, + "step": 364 + }, + { + "epoch": 0.06, + "learning_rate": 1.994904701582225e-05, + "loss": 1.012, + "step": 365 + }, + { + "epoch": 0.06, + "learning_rate": 1.9948498133502155e-05, + "loss": 0.9953, + "step": 366 + }, + { + "epoch": 0.06, + "learning_rate": 1.9947946318260834e-05, + "loss": 0.327, + "step": 367 + }, + { + "epoch": 0.06, + "learning_rate": 1.9947391570260964e-05, + "loss": 1.0219, + "step": 368 + }, + { + "epoch": 0.06, + "learning_rate": 1.994683388966609e-05, + "loss": 1.0092, + "step": 369 + }, + { + "epoch": 0.06, + "learning_rate": 1.994627327664063e-05, + "loss": 1.0554, + "step": 370 + }, + { + "epoch": 0.06, + "learning_rate": 1.9945709731349852e-05, + "loss": 0.9892, + "step": 371 + }, + { + "epoch": 0.06, + "learning_rate": 1.9945143253959896e-05, + "loss": 0.9698, + "step": 372 + }, + { + "epoch": 0.06, + "learning_rate": 1.9944573844637763e-05, + "loss": 1.0184, + "step": 373 + }, + { + "epoch": 0.06, + "learning_rate": 1.9944001503551326e-05, + "loss": 0.9346, + "step": 374 + }, + { + "epoch": 0.06, + "learning_rate": 1.9943426230869313e-05, + "loss": 0.9874, + "step": 375 + }, + { + "epoch": 0.06, + "learning_rate": 1.9942848026761324e-05, + "loss": 1.0532, + "step": 376 + }, + { + "epoch": 0.06, + "learning_rate": 1.9942266891397817e-05, + "loss": 0.337, + "step": 377 + }, + { + "epoch": 0.06, + "learning_rate": 
1.9941682824950117e-05, + "loss": 0.2871, + "step": 378 + }, + { + "epoch": 0.06, + "learning_rate": 1.9941095827590415e-05, + "loss": 1.0189, + "step": 379 + }, + { + "epoch": 0.06, + "learning_rate": 1.994050589949176e-05, + "loss": 1.0422, + "step": 380 + }, + { + "epoch": 0.06, + "learning_rate": 1.9939913040828073e-05, + "loss": 1.0773, + "step": 381 + }, + { + "epoch": 0.06, + "learning_rate": 1.9939317251774134e-05, + "loss": 0.987, + "step": 382 + }, + { + "epoch": 0.06, + "learning_rate": 1.9938718532505584e-05, + "loss": 1.0596, + "step": 383 + }, + { + "epoch": 0.06, + "learning_rate": 1.993811688319894e-05, + "loss": 0.9809, + "step": 384 + }, + { + "epoch": 0.06, + "learning_rate": 1.993751230403156e-05, + "loss": 1.001, + "step": 385 + }, + { + "epoch": 0.06, + "learning_rate": 1.9936904795181696e-05, + "loss": 0.9813, + "step": 386 + }, + { + "epoch": 0.06, + "learning_rate": 1.993629435682844e-05, + "loss": 1.0309, + "step": 387 + }, + { + "epoch": 0.07, + "learning_rate": 1.993568098915176e-05, + "loss": 1.0505, + "step": 388 + }, + { + "epoch": 0.07, + "learning_rate": 1.9935064692332476e-05, + "loss": 0.9289, + "step": 389 + }, + { + "epoch": 0.07, + "learning_rate": 1.9934445466552283e-05, + "loss": 1.0396, + "step": 390 + }, + { + "epoch": 0.07, + "learning_rate": 1.993382331199374e-05, + "loss": 0.9707, + "step": 391 + }, + { + "epoch": 0.07, + "learning_rate": 1.9933198228840254e-05, + "loss": 1.0354, + "step": 392 + }, + { + "epoch": 0.07, + "learning_rate": 1.9932570217276115e-05, + "loss": 1.0367, + "step": 393 + }, + { + "epoch": 0.07, + "learning_rate": 1.9931939277486463e-05, + "loss": 1.019, + "step": 394 + }, + { + "epoch": 0.07, + "learning_rate": 1.9931305409657307e-05, + "loss": 0.9466, + "step": 395 + }, + { + "epoch": 0.07, + "learning_rate": 1.993066861397552e-05, + "loss": 0.9844, + "step": 396 + }, + { + "epoch": 0.07, + "learning_rate": 1.9930028890628832e-05, + "loss": 1.1074, + "step": 397 + }, + { + "epoch": 0.07, + "learning_rate": 1.9929386239805843e-05, + "loss": 0.9911, + "step": 398 + }, + { + "epoch": 0.07, + "learning_rate": 1.992874066169601e-05, + "loss": 1.017, + "step": 399 + }, + { + "epoch": 0.07, + "learning_rate": 1.9928092156489664e-05, + "loss": 1.0471, + "step": 400 + }, + { + "epoch": 0.07, + "learning_rate": 1.992744072437798e-05, + "loss": 1.0741, + "step": 401 + }, + { + "epoch": 0.07, + "learning_rate": 1.992678636555301e-05, + "loss": 0.3642, + "step": 402 + }, + { + "epoch": 0.07, + "learning_rate": 1.9926129080207676e-05, + "loss": 0.9898, + "step": 403 + }, + { + "epoch": 0.07, + "learning_rate": 1.9925468868535743e-05, + "loss": 1.013, + "step": 404 + }, + { + "epoch": 0.07, + "learning_rate": 1.9924805730731847e-05, + "loss": 1.0581, + "step": 405 + }, + { + "epoch": 0.07, + "learning_rate": 1.992413966699149e-05, + "loss": 0.997, + "step": 406 + }, + { + "epoch": 0.07, + "learning_rate": 1.992347067751104e-05, + "loss": 1.0381, + "step": 407 + }, + { + "epoch": 0.07, + "learning_rate": 1.9922798762487715e-05, + "loss": 1.0074, + "step": 408 + }, + { + "epoch": 0.07, + "learning_rate": 1.9922123922119606e-05, + "loss": 0.9511, + "step": 409 + }, + { + "epoch": 0.07, + "learning_rate": 1.9921446156605663e-05, + "loss": 1.0221, + "step": 410 + }, + { + "epoch": 0.07, + "learning_rate": 1.99207654661457e-05, + "loss": 1.0648, + "step": 411 + }, + { + "epoch": 0.07, + "learning_rate": 1.9920081850940382e-05, + "loss": 0.9394, + "step": 412 + }, + { + "epoch": 0.07, + "learning_rate": 1.9919395311191256e-05, + "loss": 
1.0119, + "step": 413 + }, + { + "epoch": 0.07, + "learning_rate": 1.991870584710072e-05, + "loss": 1.004, + "step": 414 + }, + { + "epoch": 0.07, + "learning_rate": 1.9918013458872036e-05, + "loss": 0.9808, + "step": 415 + }, + { + "epoch": 0.07, + "learning_rate": 1.991731814670932e-05, + "loss": 1.0073, + "step": 416 + }, + { + "epoch": 0.07, + "learning_rate": 1.9916619910817564e-05, + "loss": 1.0454, + "step": 417 + }, + { + "epoch": 0.07, + "learning_rate": 1.9915918751402615e-05, + "loss": 1.0041, + "step": 418 + }, + { + "epoch": 0.07, + "learning_rate": 1.991521466867118e-05, + "loss": 1.0422, + "step": 419 + }, + { + "epoch": 0.07, + "learning_rate": 1.991450766283083e-05, + "loss": 0.9996, + "step": 420 + }, + { + "epoch": 0.07, + "learning_rate": 1.991379773409e-05, + "loss": 1.0294, + "step": 421 + }, + { + "epoch": 0.07, + "learning_rate": 1.9913084882657978e-05, + "loss": 1.0339, + "step": 422 + }, + { + "epoch": 0.07, + "learning_rate": 1.9912369108744927e-05, + "loss": 1.0093, + "step": 423 + }, + { + "epoch": 0.07, + "learning_rate": 1.9911650412561862e-05, + "loss": 0.9874, + "step": 424 + }, + { + "epoch": 0.07, + "learning_rate": 1.991092879432066e-05, + "loss": 0.9816, + "step": 425 + }, + { + "epoch": 0.07, + "learning_rate": 1.9910204254234068e-05, + "loss": 0.9916, + "step": 426 + }, + { + "epoch": 0.07, + "learning_rate": 1.990947679251568e-05, + "loss": 1.053, + "step": 427 + }, + { + "epoch": 0.07, + "learning_rate": 1.990874640937997e-05, + "loss": 1.0531, + "step": 428 + }, + { + "epoch": 0.07, + "learning_rate": 1.990801310504225e-05, + "loss": 1.0475, + "step": 429 + }, + { + "epoch": 0.07, + "learning_rate": 1.9907276879718715e-05, + "loss": 1.0233, + "step": 430 + }, + { + "epoch": 0.07, + "learning_rate": 1.9906537733626407e-05, + "loss": 1.0297, + "step": 431 + }, + { + "epoch": 0.07, + "learning_rate": 1.9905795666983234e-05, + "loss": 1.0416, + "step": 432 + }, + { + "epoch": 0.07, + "learning_rate": 1.990505068000797e-05, + "loss": 1.0498, + "step": 433 + }, + { + "epoch": 0.07, + "learning_rate": 1.9904302772920238e-05, + "loss": 1.0084, + "step": 434 + }, + { + "epoch": 0.07, + "learning_rate": 1.9903551945940537e-05, + "loss": 1.031, + "step": 435 + }, + { + "epoch": 0.07, + "learning_rate": 1.990279819929021e-05, + "loss": 0.9891, + "step": 436 + }, + { + "epoch": 0.07, + "learning_rate": 1.990204153319147e-05, + "loss": 1.0209, + "step": 437 + }, + { + "epoch": 0.07, + "learning_rate": 1.9901281947867402e-05, + "loss": 1.0202, + "step": 438 + }, + { + "epoch": 0.07, + "learning_rate": 1.9900519443541927e-05, + "loss": 0.9495, + "step": 439 + }, + { + "epoch": 0.07, + "learning_rate": 1.9899754020439843e-05, + "loss": 1.0472, + "step": 440 + }, + { + "epoch": 0.07, + "learning_rate": 1.9898985678786802e-05, + "loss": 0.9953, + "step": 441 + }, + { + "epoch": 0.07, + "learning_rate": 1.989821441880933e-05, + "loss": 0.9958, + "step": 442 + }, + { + "epoch": 0.07, + "learning_rate": 1.989744024073479e-05, + "loss": 1.0218, + "step": 443 + }, + { + "epoch": 0.07, + "learning_rate": 1.9896663144791423e-05, + "loss": 0.9916, + "step": 444 + }, + { + "epoch": 0.07, + "learning_rate": 1.9895883131208325e-05, + "loss": 1.0705, + "step": 445 + }, + { + "epoch": 0.07, + "learning_rate": 1.989510020021545e-05, + "loss": 0.9881, + "step": 446 + }, + { + "epoch": 0.07, + "learning_rate": 1.989431435204362e-05, + "loss": 1.0519, + "step": 447 + }, + { + "epoch": 0.08, + "learning_rate": 1.98935255869245e-05, + "loss": 1.0659, + "step": 448 + }, + { + "epoch": 
0.08, + "learning_rate": 1.9892733905090642e-05, + "loss": 1.0336, + "step": 449 + }, + { + "epoch": 0.08, + "learning_rate": 1.989193930677543e-05, + "loss": 0.9237, + "step": 450 + }, + { + "epoch": 0.08, + "learning_rate": 1.989114179221312e-05, + "loss": 0.9928, + "step": 451 + }, + { + "epoch": 0.08, + "learning_rate": 1.9890341361638837e-05, + "loss": 0.9393, + "step": 452 + }, + { + "epoch": 0.08, + "learning_rate": 1.9889538015288545e-05, + "loss": 1.0642, + "step": 453 + }, + { + "epoch": 0.08, + "learning_rate": 1.988873175339909e-05, + "loss": 1.0665, + "step": 454 + }, + { + "epoch": 0.08, + "learning_rate": 1.9887922576208155e-05, + "loss": 0.3874, + "step": 455 + }, + { + "epoch": 0.08, + "learning_rate": 1.9887110483954303e-05, + "loss": 0.9612, + "step": 456 + }, + { + "epoch": 0.08, + "learning_rate": 1.988629547687694e-05, + "loss": 1.0675, + "step": 457 + }, + { + "epoch": 0.08, + "learning_rate": 1.9885477555216346e-05, + "loss": 1.0717, + "step": 458 + }, + { + "epoch": 0.08, + "learning_rate": 1.9884656719213645e-05, + "loss": 1.0396, + "step": 459 + }, + { + "epoch": 0.08, + "learning_rate": 1.9883832969110837e-05, + "loss": 1.0611, + "step": 460 + }, + { + "epoch": 0.08, + "learning_rate": 1.9883006305150767e-05, + "loss": 0.9289, + "step": 461 + }, + { + "epoch": 0.08, + "learning_rate": 1.9882176727577143e-05, + "loss": 0.973, + "step": 462 + }, + { + "epoch": 0.08, + "learning_rate": 1.9881344236634537e-05, + "loss": 0.9627, + "step": 463 + }, + { + "epoch": 0.08, + "learning_rate": 1.988050883256837e-05, + "loss": 1.0181, + "step": 464 + }, + { + "epoch": 0.08, + "learning_rate": 1.9879670515624937e-05, + "loss": 1.0471, + "step": 465 + }, + { + "epoch": 0.08, + "learning_rate": 1.9878829286051374e-05, + "loss": 1.0274, + "step": 466 + }, + { + "epoch": 0.08, + "learning_rate": 1.9877985144095687e-05, + "loss": 1.0441, + "step": 467 + }, + { + "epoch": 0.08, + "learning_rate": 1.987713809000674e-05, + "loss": 1.0032, + "step": 468 + }, + { + "epoch": 0.08, + "learning_rate": 1.9876288124034252e-05, + "loss": 0.97, + "step": 469 + }, + { + "epoch": 0.08, + "learning_rate": 1.9875435246428798e-05, + "loss": 0.3541, + "step": 470 + }, + { + "epoch": 0.08, + "learning_rate": 1.9874579457441824e-05, + "loss": 1.0195, + "step": 471 + }, + { + "epoch": 0.08, + "learning_rate": 1.9873720757325613e-05, + "loss": 1.0177, + "step": 472 + }, + { + "epoch": 0.08, + "learning_rate": 1.9872859146333333e-05, + "loss": 1.0531, + "step": 473 + }, + { + "epoch": 0.08, + "learning_rate": 1.9871994624718985e-05, + "loss": 0.9687, + "step": 474 + }, + { + "epoch": 0.08, + "learning_rate": 1.987112719273745e-05, + "loss": 0.9826, + "step": 475 + }, + { + "epoch": 0.08, + "learning_rate": 1.987025685064444e-05, + "loss": 1.0312, + "step": 476 + }, + { + "epoch": 0.08, + "learning_rate": 1.9869383598696555e-05, + "loss": 1.0378, + "step": 477 + }, + { + "epoch": 0.08, + "learning_rate": 1.9868507437151235e-05, + "loss": 1.024, + "step": 478 + }, + { + "epoch": 0.08, + "learning_rate": 1.9867628366266776e-05, + "loss": 0.9643, + "step": 479 + }, + { + "epoch": 0.08, + "learning_rate": 1.9866746386302345e-05, + "loss": 1.0372, + "step": 480 + }, + { + "epoch": 0.08, + "learning_rate": 1.9865861497517957e-05, + "loss": 1.0114, + "step": 481 + }, + { + "epoch": 0.08, + "learning_rate": 1.9864973700174483e-05, + "loss": 1.0076, + "step": 482 + }, + { + "epoch": 0.08, + "learning_rate": 1.9864082994533658e-05, + "loss": 1.0753, + "step": 483 + }, + { + "epoch": 0.08, + "learning_rate": 
1.9863189380858068e-05, + "loss": 1.0451, + "step": 484 + }, + { + "epoch": 0.08, + "learning_rate": 1.9862292859411164e-05, + "loss": 1.0264, + "step": 485 + }, + { + "epoch": 0.08, + "learning_rate": 1.9861393430457248e-05, + "loss": 1.0396, + "step": 486 + }, + { + "epoch": 0.08, + "learning_rate": 1.986049109426148e-05, + "loss": 0.9938, + "step": 487 + }, + { + "epoch": 0.08, + "learning_rate": 1.985958585108988e-05, + "loss": 1.029, + "step": 488 + }, + { + "epoch": 0.08, + "learning_rate": 1.9858677701209324e-05, + "loss": 1.0887, + "step": 489 + }, + { + "epoch": 0.08, + "learning_rate": 1.985776664488754e-05, + "loss": 1.044, + "step": 490 + }, + { + "epoch": 0.08, + "learning_rate": 1.985685268239312e-05, + "loss": 0.9561, + "step": 491 + }, + { + "epoch": 0.08, + "learning_rate": 1.9855935813995504e-05, + "loss": 0.9677, + "step": 492 + }, + { + "epoch": 0.08, + "learning_rate": 1.9855016039965006e-05, + "loss": 1.0331, + "step": 493 + }, + { + "epoch": 0.08, + "learning_rate": 1.9854093360572775e-05, + "loss": 0.9753, + "step": 494 + }, + { + "epoch": 0.08, + "learning_rate": 1.9853167776090832e-05, + "loss": 1.0164, + "step": 495 + }, + { + "epoch": 0.08, + "learning_rate": 1.9852239286792047e-05, + "loss": 0.8929, + "step": 496 + }, + { + "epoch": 0.08, + "learning_rate": 1.9851307892950146e-05, + "loss": 1.0278, + "step": 497 + }, + { + "epoch": 0.08, + "learning_rate": 1.9850373594839717e-05, + "loss": 1.0317, + "step": 498 + }, + { + "epoch": 0.08, + "learning_rate": 1.98494363927362e-05, + "loss": 0.9874, + "step": 499 + }, + { + "epoch": 0.08, + "learning_rate": 1.9848496286915894e-05, + "loss": 0.9883, + "step": 500 + }, + { + "epoch": 0.08, + "learning_rate": 1.9847553277655948e-05, + "loss": 0.9818, + "step": 501 + }, + { + "epoch": 0.08, + "learning_rate": 1.984660736523437e-05, + "loss": 0.9902, + "step": 502 + }, + { + "epoch": 0.08, + "learning_rate": 1.984565854993003e-05, + "loss": 1.0752, + "step": 503 + }, + { + "epoch": 0.08, + "learning_rate": 1.9844706832022646e-05, + "loss": 0.9733, + "step": 504 + }, + { + "epoch": 0.08, + "learning_rate": 1.9843752211792792e-05, + "loss": 1.0121, + "step": 505 + }, + { + "epoch": 0.08, + "learning_rate": 1.9842794689521905e-05, + "loss": 1.0301, + "step": 506 + }, + { + "epoch": 0.08, + "learning_rate": 1.984183426549227e-05, + "loss": 0.9623, + "step": 507 + }, + { + "epoch": 0.09, + "learning_rate": 1.984087093998703e-05, + "loss": 0.9986, + "step": 508 + }, + { + "epoch": 0.09, + "learning_rate": 1.9839904713290186e-05, + "loss": 0.9726, + "step": 509 + }, + { + "epoch": 0.09, + "learning_rate": 1.9838935585686586e-05, + "loss": 0.9634, + "step": 510 + }, + { + "epoch": 0.09, + "learning_rate": 1.9837963557461945e-05, + "loss": 0.939, + "step": 511 + }, + { + "epoch": 0.09, + "learning_rate": 1.983698862890282e-05, + "loss": 0.9631, + "step": 512 + }, + { + "epoch": 0.09, + "learning_rate": 1.9836010800296642e-05, + "loss": 1.0101, + "step": 513 + }, + { + "epoch": 0.09, + "learning_rate": 1.9835030071931672e-05, + "loss": 1.0479, + "step": 514 + }, + { + "epoch": 0.09, + "learning_rate": 1.9834046444097046e-05, + "loss": 0.9875, + "step": 515 + }, + { + "epoch": 0.09, + "learning_rate": 1.9833059917082744e-05, + "loss": 0.9789, + "step": 516 + }, + { + "epoch": 0.09, + "learning_rate": 1.983207049117961e-05, + "loss": 0.9597, + "step": 517 + }, + { + "epoch": 0.09, + "learning_rate": 1.9831078166679334e-05, + "loss": 0.9932, + "step": 518 + }, + { + "epoch": 0.09, + "learning_rate": 1.9830082943874464e-05, + "loss": 
0.9835, + "step": 519 + }, + { + "epoch": 0.09, + "learning_rate": 1.98290848230584e-05, + "loss": 0.9366, + "step": 520 + }, + { + "epoch": 0.09, + "learning_rate": 1.98280838045254e-05, + "loss": 1.0266, + "step": 521 + }, + { + "epoch": 0.09, + "learning_rate": 1.9827079888570573e-05, + "loss": 1.0111, + "step": 522 + }, + { + "epoch": 0.09, + "learning_rate": 1.982607307548989e-05, + "loss": 1.0051, + "step": 523 + }, + { + "epoch": 0.09, + "learning_rate": 1.9825063365580165e-05, + "loss": 1.0782, + "step": 524 + }, + { + "epoch": 0.09, + "learning_rate": 1.9824050759139068e-05, + "loss": 1.016, + "step": 525 + }, + { + "epoch": 0.09, + "learning_rate": 1.9823035256465136e-05, + "loss": 0.9525, + "step": 526 + }, + { + "epoch": 0.09, + "learning_rate": 1.982201685785774e-05, + "loss": 0.9766, + "step": 527 + }, + { + "epoch": 0.09, + "learning_rate": 1.9820995563617117e-05, + "loss": 0.987, + "step": 528 + }, + { + "epoch": 0.09, + "learning_rate": 1.9819971374044356e-05, + "loss": 1.0113, + "step": 529 + }, + { + "epoch": 0.09, + "learning_rate": 1.9818944289441402e-05, + "loss": 1.0075, + "step": 530 + }, + { + "epoch": 0.09, + "learning_rate": 1.9817914310111044e-05, + "loss": 0.9639, + "step": 531 + }, + { + "epoch": 0.09, + "learning_rate": 1.981688143635694e-05, + "loss": 1.0275, + "step": 532 + }, + { + "epoch": 0.09, + "learning_rate": 1.981584566848358e-05, + "loss": 1.0182, + "step": 533 + }, + { + "epoch": 0.09, + "learning_rate": 1.981480700679633e-05, + "loss": 1.0575, + "step": 534 + }, + { + "epoch": 0.09, + "learning_rate": 1.981376545160139e-05, + "loss": 1.0343, + "step": 535 + }, + { + "epoch": 0.09, + "learning_rate": 1.9812721003205825e-05, + "loss": 1.0823, + "step": 536 + }, + { + "epoch": 0.09, + "learning_rate": 1.981167366191755e-05, + "loss": 1.0282, + "step": 537 + }, + { + "epoch": 0.09, + "learning_rate": 1.981062342804533e-05, + "loss": 0.9715, + "step": 538 + }, + { + "epoch": 0.09, + "learning_rate": 1.980957030189879e-05, + "loss": 1.0787, + "step": 539 + }, + { + "epoch": 0.09, + "learning_rate": 1.9808514283788398e-05, + "loss": 1.0498, + "step": 540 + }, + { + "epoch": 0.09, + "learning_rate": 1.9807455374025477e-05, + "loss": 0.932, + "step": 541 + }, + { + "epoch": 0.09, + "learning_rate": 1.980639357292221e-05, + "loss": 1.0468, + "step": 542 + }, + { + "epoch": 0.09, + "learning_rate": 1.9805328880791626e-05, + "loss": 1.0305, + "step": 543 + }, + { + "epoch": 0.09, + "learning_rate": 1.9804261297947606e-05, + "loss": 1.0071, + "step": 544 + }, + { + "epoch": 0.09, + "learning_rate": 1.980319082470488e-05, + "loss": 0.9821, + "step": 545 + }, + { + "epoch": 0.09, + "learning_rate": 1.9802117461379047e-05, + "loss": 0.9816, + "step": 546 + }, + { + "epoch": 0.09, + "learning_rate": 1.9801041208286535e-05, + "loss": 0.9703, + "step": 547 + }, + { + "epoch": 0.09, + "learning_rate": 1.9799962065744634e-05, + "loss": 0.9333, + "step": 548 + }, + { + "epoch": 0.09, + "learning_rate": 1.9798880034071493e-05, + "loss": 1.0102, + "step": 549 + }, + { + "epoch": 0.09, + "learning_rate": 1.9797795113586104e-05, + "loss": 1.0175, + "step": 550 + }, + { + "epoch": 0.09, + "learning_rate": 1.9796707304608314e-05, + "loss": 0.9981, + "step": 551 + }, + { + "epoch": 0.09, + "learning_rate": 1.9795616607458817e-05, + "loss": 0.9836, + "step": 552 + }, + { + "epoch": 0.09, + "learning_rate": 1.9794523022459168e-05, + "loss": 0.9897, + "step": 553 + }, + { + "epoch": 0.09, + "learning_rate": 1.9793426549931757e-05, + "loss": 0.9961, + "step": 554 + }, + { + 
"epoch": 0.09, + "learning_rate": 1.9792327190199843e-05, + "loss": 0.9787, + "step": 555 + }, + { + "epoch": 0.09, + "learning_rate": 1.979122494358753e-05, + "loss": 0.9851, + "step": 556 + }, + { + "epoch": 0.09, + "learning_rate": 1.979011981041977e-05, + "loss": 0.9846, + "step": 557 + }, + { + "epoch": 0.09, + "learning_rate": 1.9789011791022365e-05, + "loss": 1.1081, + "step": 558 + }, + { + "epoch": 0.09, + "learning_rate": 1.978790088572197e-05, + "loss": 1.0245, + "step": 559 + }, + { + "epoch": 0.09, + "learning_rate": 1.9786787094846097e-05, + "loss": 1.0294, + "step": 560 + }, + { + "epoch": 0.09, + "learning_rate": 1.97856704187231e-05, + "loss": 0.9407, + "step": 561 + }, + { + "epoch": 0.09, + "learning_rate": 1.978455085768219e-05, + "loss": 0.9938, + "step": 562 + }, + { + "epoch": 0.09, + "learning_rate": 1.9783428412053422e-05, + "loss": 0.9948, + "step": 563 + }, + { + "epoch": 0.09, + "learning_rate": 1.9782303082167705e-05, + "loss": 0.9775, + "step": 564 + }, + { + "epoch": 0.09, + "learning_rate": 1.9781174868356797e-05, + "loss": 1.0009, + "step": 565 + }, + { + "epoch": 0.09, + "learning_rate": 1.9780043770953306e-05, + "loss": 1.0636, + "step": 566 + }, + { + "epoch": 0.1, + "learning_rate": 1.9778909790290697e-05, + "loss": 0.9688, + "step": 567 + }, + { + "epoch": 0.1, + "learning_rate": 1.9777772926703276e-05, + "loss": 1.0079, + "step": 568 + }, + { + "epoch": 0.1, + "learning_rate": 1.97766331805262e-05, + "loss": 1.0206, + "step": 569 + }, + { + "epoch": 0.1, + "learning_rate": 1.9775490552095485e-05, + "loss": 1.0417, + "step": 570 + }, + { + "epoch": 0.1, + "learning_rate": 1.9774345041747982e-05, + "loss": 1.0096, + "step": 571 + }, + { + "epoch": 0.1, + "learning_rate": 1.9773196649821405e-05, + "loss": 1.0062, + "step": 572 + }, + { + "epoch": 0.1, + "learning_rate": 1.9772045376654308e-05, + "loss": 1.0222, + "step": 573 + }, + { + "epoch": 0.1, + "learning_rate": 1.97708912225861e-05, + "loss": 0.9609, + "step": 574 + }, + { + "epoch": 0.1, + "learning_rate": 1.976973418795704e-05, + "loss": 1.0118, + "step": 575 + }, + { + "epoch": 0.1, + "learning_rate": 1.9768574273108228e-05, + "loss": 0.9747, + "step": 576 + }, + { + "epoch": 0.1, + "learning_rate": 1.9767411478381623e-05, + "loss": 1.0491, + "step": 577 + }, + { + "epoch": 0.1, + "learning_rate": 1.9766245804120032e-05, + "loss": 0.324, + "step": 578 + }, + { + "epoch": 0.1, + "learning_rate": 1.9765077250667104e-05, + "loss": 1.0357, + "step": 579 + }, + { + "epoch": 0.1, + "learning_rate": 1.976390581836734e-05, + "loss": 0.9553, + "step": 580 + }, + { + "epoch": 0.1, + "learning_rate": 1.9762731507566094e-05, + "loss": 0.9861, + "step": 581 + }, + { + "epoch": 0.1, + "learning_rate": 1.976155431860956e-05, + "loss": 0.9994, + "step": 582 + }, + { + "epoch": 0.1, + "learning_rate": 1.9760374251844793e-05, + "loss": 0.9834, + "step": 583 + }, + { + "epoch": 0.1, + "learning_rate": 1.9759191307619677e-05, + "loss": 0.961, + "step": 584 + }, + { + "epoch": 0.1, + "learning_rate": 1.975800548628297e-05, + "loss": 1.0598, + "step": 585 + }, + { + "epoch": 0.1, + "learning_rate": 1.975681678818426e-05, + "loss": 1.022, + "step": 586 + }, + { + "epoch": 0.1, + "learning_rate": 1.9755625213673984e-05, + "loss": 1.0112, + "step": 587 + }, + { + "epoch": 0.1, + "learning_rate": 1.9754430763103428e-05, + "loss": 1.045, + "step": 588 + }, + { + "epoch": 0.1, + "learning_rate": 1.975323343682474e-05, + "loss": 0.9917, + "step": 589 + }, + { + "epoch": 0.1, + "learning_rate": 1.9752033235190895e-05, + 
"loss": 1.0134, + "step": 590 + }, + { + "epoch": 0.1, + "learning_rate": 1.9750830158555728e-05, + "loss": 0.9918, + "step": 591 + }, + { + "epoch": 0.1, + "learning_rate": 1.9749624207273916e-05, + "loss": 1.004, + "step": 592 + }, + { + "epoch": 0.1, + "learning_rate": 1.9748415381700988e-05, + "loss": 0.9832, + "step": 593 + }, + { + "epoch": 0.1, + "learning_rate": 1.974720368219332e-05, + "loss": 1.0243, + "step": 594 + }, + { + "epoch": 0.1, + "learning_rate": 1.9745989109108134e-05, + "loss": 1.0264, + "step": 595 + }, + { + "epoch": 0.1, + "learning_rate": 1.9744771662803496e-05, + "loss": 0.9747, + "step": 596 + }, + { + "epoch": 0.1, + "learning_rate": 1.9743551343638324e-05, + "loss": 1.0094, + "step": 597 + }, + { + "epoch": 0.1, + "learning_rate": 1.974232815197238e-05, + "loss": 1.0321, + "step": 598 + }, + { + "epoch": 0.1, + "learning_rate": 1.974110208816627e-05, + "loss": 1.013, + "step": 599 + }, + { + "epoch": 0.1, + "learning_rate": 1.9739873152581456e-05, + "loss": 0.9432, + "step": 600 + }, + { + "epoch": 0.1, + "learning_rate": 1.9738641345580237e-05, + "loss": 0.9811, + "step": 601 + }, + { + "epoch": 0.1, + "learning_rate": 1.9737406667525766e-05, + "loss": 1.0407, + "step": 602 + }, + { + "epoch": 0.1, + "learning_rate": 1.973616911878204e-05, + "loss": 1.0158, + "step": 603 + }, + { + "epoch": 0.1, + "learning_rate": 1.9734928699713897e-05, + "loss": 1.0505, + "step": 604 + }, + { + "epoch": 0.1, + "learning_rate": 1.9733685410687027e-05, + "loss": 1.0324, + "step": 605 + }, + { + "epoch": 0.1, + "learning_rate": 1.9732439252067967e-05, + "loss": 1.0363, + "step": 606 + }, + { + "epoch": 0.1, + "learning_rate": 1.973119022422409e-05, + "loss": 0.9674, + "step": 607 + }, + { + "epoch": 0.1, + "learning_rate": 1.9729938327523635e-05, + "loss": 0.9479, + "step": 608 + }, + { + "epoch": 0.1, + "learning_rate": 1.9728683562335663e-05, + "loss": 0.9959, + "step": 609 + }, + { + "epoch": 0.1, + "learning_rate": 1.97274259290301e-05, + "loss": 1.0289, + "step": 610 + }, + { + "epoch": 0.1, + "learning_rate": 1.97261654279777e-05, + "loss": 1.007, + "step": 611 + }, + { + "epoch": 0.1, + "learning_rate": 1.972490205955008e-05, + "loss": 0.9964, + "step": 612 + }, + { + "epoch": 0.1, + "learning_rate": 1.972363582411969e-05, + "loss": 1.0354, + "step": 613 + }, + { + "epoch": 0.1, + "learning_rate": 1.9722366722059836e-05, + "loss": 1.0724, + "step": 614 + }, + { + "epoch": 0.1, + "learning_rate": 1.9721094753744655e-05, + "loss": 1.005, + "step": 615 + }, + { + "epoch": 0.1, + "learning_rate": 1.9719819919549138e-05, + "loss": 0.9833, + "step": 616 + }, + { + "epoch": 0.1, + "learning_rate": 1.9718542219849122e-05, + "loss": 1.0245, + "step": 617 + }, + { + "epoch": 0.1, + "learning_rate": 1.9717261655021286e-05, + "loss": 0.9156, + "step": 618 + }, + { + "epoch": 0.1, + "learning_rate": 1.9715978225443147e-05, + "loss": 1.0126, + "step": 619 + }, + { + "epoch": 0.1, + "learning_rate": 1.9714691931493087e-05, + "loss": 0.339, + "step": 620 + }, + { + "epoch": 0.1, + "learning_rate": 1.9713402773550307e-05, + "loss": 0.905, + "step": 621 + }, + { + "epoch": 0.1, + "learning_rate": 1.971211075199487e-05, + "loss": 0.3343, + "step": 622 + }, + { + "epoch": 0.1, + "learning_rate": 1.971081586720767e-05, + "loss": 0.9834, + "step": 623 + }, + { + "epoch": 0.1, + "learning_rate": 1.9709518119570465e-05, + "loss": 0.9719, + "step": 624 + }, + { + "epoch": 0.1, + "learning_rate": 1.9708217509465837e-05, + "loss": 1.0122, + "step": 625 + }, + { + "epoch": 0.1, + "learning_rate": 
1.970691403727722e-05, + "loss": 0.98, + "step": 626 + }, + { + "epoch": 0.11, + "learning_rate": 1.970560770338889e-05, + "loss": 0.9678, + "step": 627 + }, + { + "epoch": 0.11, + "learning_rate": 1.9704298508185973e-05, + "loss": 1.0243, + "step": 628 + }, + { + "epoch": 0.11, + "learning_rate": 1.9702986452054426e-05, + "loss": 0.9354, + "step": 629 + }, + { + "epoch": 0.11, + "learning_rate": 1.9701671535381064e-05, + "loss": 1.0951, + "step": 630 + }, + { + "epoch": 0.11, + "learning_rate": 1.9700353758553536e-05, + "loss": 1.0916, + "step": 631 + }, + { + "epoch": 0.11, + "learning_rate": 1.9699033121960333e-05, + "loss": 0.9168, + "step": 632 + }, + { + "epoch": 0.11, + "learning_rate": 1.9697709625990793e-05, + "loss": 0.9704, + "step": 633 + }, + { + "epoch": 0.11, + "learning_rate": 1.96963832710351e-05, + "loss": 1.0101, + "step": 634 + }, + { + "epoch": 0.11, + "learning_rate": 1.969505405748428e-05, + "loss": 0.9643, + "step": 635 + }, + { + "epoch": 0.11, + "learning_rate": 1.9693721985730186e-05, + "loss": 1.0397, + "step": 636 + }, + { + "epoch": 0.11, + "learning_rate": 1.969238705616554e-05, + "loss": 1.0173, + "step": 637 + }, + { + "epoch": 0.11, + "learning_rate": 1.969104926918389e-05, + "loss": 0.9693, + "step": 638 + }, + { + "epoch": 0.11, + "learning_rate": 1.9689708625179627e-05, + "loss": 0.3512, + "step": 639 + }, + { + "epoch": 0.11, + "learning_rate": 1.9688365124547986e-05, + "loss": 0.9983, + "step": 640 + }, + { + "epoch": 0.11, + "learning_rate": 1.9687018767685048e-05, + "loss": 0.9385, + "step": 641 + }, + { + "epoch": 0.11, + "learning_rate": 1.9685669554987732e-05, + "loss": 0.9907, + "step": 642 + }, + { + "epoch": 0.11, + "learning_rate": 1.96843174868538e-05, + "loss": 0.9852, + "step": 643 + }, + { + "epoch": 0.11, + "learning_rate": 1.9682962563681857e-05, + "loss": 0.8984, + "step": 644 + }, + { + "epoch": 0.11, + "learning_rate": 1.9681604785871347e-05, + "loss": 1.0278, + "step": 645 + }, + { + "epoch": 0.11, + "learning_rate": 1.9680244153822558e-05, + "loss": 0.9556, + "step": 646 + }, + { + "epoch": 0.11, + "learning_rate": 1.9678880667936617e-05, + "loss": 0.9063, + "step": 647 + }, + { + "epoch": 0.11, + "learning_rate": 1.9677514328615496e-05, + "loss": 1.0669, + "step": 648 + }, + { + "epoch": 0.11, + "learning_rate": 1.9676145136262007e-05, + "loss": 0.9915, + "step": 649 + }, + { + "epoch": 0.11, + "learning_rate": 1.9674773091279794e-05, + "loss": 1.0251, + "step": 650 + }, + { + "epoch": 0.11, + "learning_rate": 1.967339819407336e-05, + "loss": 1.0284, + "step": 651 + }, + { + "epoch": 0.11, + "learning_rate": 1.9672020445048036e-05, + "loss": 0.3437, + "step": 652 + }, + { + "epoch": 0.11, + "learning_rate": 1.9670639844609997e-05, + "loss": 1.0384, + "step": 653 + }, + { + "epoch": 0.11, + "learning_rate": 1.9669256393166258e-05, + "loss": 0.9198, + "step": 654 + }, + { + "epoch": 0.11, + "learning_rate": 1.966787009112467e-05, + "loss": 0.9665, + "step": 655 + }, + { + "epoch": 0.11, + "learning_rate": 1.966648093889394e-05, + "loss": 1.0513, + "step": 656 + }, + { + "epoch": 0.11, + "learning_rate": 1.9665088936883596e-05, + "loss": 0.9794, + "step": 657 + }, + { + "epoch": 0.11, + "learning_rate": 1.966369408550402e-05, + "loss": 0.976, + "step": 658 + }, + { + "epoch": 0.11, + "learning_rate": 1.9662296385166422e-05, + "loss": 0.975, + "step": 659 + }, + { + "epoch": 0.11, + "learning_rate": 1.9660895836282866e-05, + "loss": 1.02, + "step": 660 + }, + { + "epoch": 0.11, + "learning_rate": 1.965949243926624e-05, + "loss": 
0.9812, + "step": 661 + }, + { + "epoch": 0.11, + "learning_rate": 1.965808619453029e-05, + "loss": 0.9644, + "step": 662 + }, + { + "epoch": 0.11, + "learning_rate": 1.965667710248959e-05, + "loss": 1.0136, + "step": 663 + }, + { + "epoch": 0.11, + "learning_rate": 1.9655265163559547e-05, + "loss": 1.0403, + "step": 664 + }, + { + "epoch": 0.11, + "learning_rate": 1.965385037815642e-05, + "loss": 1.0488, + "step": 665 + }, + { + "epoch": 0.11, + "learning_rate": 1.9652432746697306e-05, + "loss": 1.0124, + "step": 666 + }, + { + "epoch": 0.11, + "learning_rate": 1.9651012269600133e-05, + "loss": 0.3474, + "step": 667 + }, + { + "epoch": 0.11, + "learning_rate": 1.9649588947283674e-05, + "loss": 0.96, + "step": 668 + }, + { + "epoch": 0.11, + "learning_rate": 1.9648162780167542e-05, + "loss": 0.9739, + "step": 669 + }, + { + "epoch": 0.11, + "learning_rate": 1.964673376867218e-05, + "loss": 1.0296, + "step": 670 + }, + { + "epoch": 0.11, + "learning_rate": 1.9645301913218878e-05, + "loss": 1.0259, + "step": 671 + }, + { + "epoch": 0.11, + "learning_rate": 1.9643867214229764e-05, + "loss": 1.0523, + "step": 672 + }, + { + "epoch": 0.11, + "learning_rate": 1.9642429672127802e-05, + "loss": 1.0205, + "step": 673 + }, + { + "epoch": 0.11, + "learning_rate": 1.9640989287336795e-05, + "loss": 0.961, + "step": 674 + }, + { + "epoch": 0.11, + "learning_rate": 1.963954606028138e-05, + "loss": 0.9697, + "step": 675 + }, + { + "epoch": 0.11, + "learning_rate": 1.9638099991387038e-05, + "loss": 0.9801, + "step": 676 + }, + { + "epoch": 0.11, + "learning_rate": 1.963665108108008e-05, + "loss": 0.9982, + "step": 677 + }, + { + "epoch": 0.11, + "learning_rate": 1.9635199329787672e-05, + "loss": 0.9928, + "step": 678 + }, + { + "epoch": 0.11, + "learning_rate": 1.9633744737937796e-05, + "loss": 0.9881, + "step": 679 + }, + { + "epoch": 0.11, + "learning_rate": 1.9632287305959278e-05, + "loss": 1.025, + "step": 680 + }, + { + "epoch": 0.11, + "learning_rate": 1.9630827034281794e-05, + "loss": 1.0273, + "step": 681 + }, + { + "epoch": 0.11, + "learning_rate": 1.962936392333584e-05, + "loss": 0.3631, + "step": 682 + }, + { + "epoch": 0.11, + "learning_rate": 1.962789797355276e-05, + "loss": 0.9959, + "step": 683 + }, + { + "epoch": 0.11, + "learning_rate": 1.962642918536473e-05, + "loss": 0.9844, + "step": 684 + }, + { + "epoch": 0.11, + "learning_rate": 1.9624957559204763e-05, + "loss": 1.0361, + "step": 685 + }, + { + "epoch": 0.12, + "learning_rate": 1.962348309550671e-05, + "loss": 1.019, + "step": 686 + }, + { + "epoch": 0.12, + "learning_rate": 1.962200579470526e-05, + "loss": 0.9731, + "step": 687 + }, + { + "epoch": 0.12, + "learning_rate": 1.9620525657235938e-05, + "loss": 0.9822, + "step": 688 + }, + { + "epoch": 0.12, + "learning_rate": 1.96190426835351e-05, + "loss": 1.039, + "step": 689 + }, + { + "epoch": 0.12, + "learning_rate": 1.9617556874039945e-05, + "loss": 1.0118, + "step": 690 + }, + { + "epoch": 0.12, + "learning_rate": 1.9616068229188507e-05, + "loss": 1.0062, + "step": 691 + }, + { + "epoch": 0.12, + "learning_rate": 1.961457674941965e-05, + "loss": 1.0064, + "step": 692 + }, + { + "epoch": 0.12, + "learning_rate": 1.9613082435173078e-05, + "loss": 1.0422, + "step": 693 + }, + { + "epoch": 0.12, + "learning_rate": 1.961158528688933e-05, + "loss": 0.9952, + "step": 694 + }, + { + "epoch": 0.12, + "learning_rate": 1.9610085305009784e-05, + "loss": 0.3218, + "step": 695 + }, + { + "epoch": 0.12, + "learning_rate": 1.960858248997665e-05, + "loss": 1.0189, + "step": 696 + }, + { + "epoch": 
0.12, + "learning_rate": 1.960707684223297e-05, + "loss": 0.9823, + "step": 697 + }, + { + "epoch": 0.12, + "learning_rate": 1.9605568362222632e-05, + "loss": 1.0163, + "step": 698 + }, + { + "epoch": 0.12, + "learning_rate": 1.9604057050390342e-05, + "loss": 0.3396, + "step": 699 + }, + { + "epoch": 0.12, + "learning_rate": 1.9602542907181657e-05, + "loss": 1.0044, + "step": 700 + }, + { + "epoch": 0.12, + "learning_rate": 1.9601025933042962e-05, + "loss": 1.0067, + "step": 701 + }, + { + "epoch": 0.12, + "learning_rate": 1.959950612842147e-05, + "loss": 0.3818, + "step": 702 + }, + { + "epoch": 0.12, + "learning_rate": 1.959798349376525e-05, + "loss": 0.9973, + "step": 703 + }, + { + "epoch": 0.12, + "learning_rate": 1.9596458029523174e-05, + "loss": 0.9744, + "step": 704 + }, + { + "epoch": 0.12, + "learning_rate": 1.9594929736144978e-05, + "loss": 1.0472, + "step": 705 + }, + { + "epoch": 0.12, + "learning_rate": 1.9593398614081206e-05, + "loss": 1.068, + "step": 706 + }, + { + "epoch": 0.12, + "learning_rate": 1.959186466378326e-05, + "loss": 0.9993, + "step": 707 + }, + { + "epoch": 0.12, + "learning_rate": 1.959032788570336e-05, + "loss": 1.0164, + "step": 708 + }, + { + "epoch": 0.12, + "learning_rate": 1.9588788280294568e-05, + "loss": 1.0012, + "step": 709 + }, + { + "epoch": 0.12, + "learning_rate": 1.9587245848010766e-05, + "loss": 1.0362, + "step": 710 + }, + { + "epoch": 0.12, + "learning_rate": 1.9585700589306688e-05, + "loss": 0.9355, + "step": 711 + }, + { + "epoch": 0.12, + "learning_rate": 1.9584152504637893e-05, + "loss": 0.9769, + "step": 712 + }, + { + "epoch": 0.12, + "learning_rate": 1.9582601594460767e-05, + "loss": 0.9713, + "step": 713 + }, + { + "epoch": 0.12, + "learning_rate": 1.958104785923254e-05, + "loss": 0.976, + "step": 714 + }, + { + "epoch": 0.12, + "learning_rate": 1.9579491299411263e-05, + "loss": 1.0317, + "step": 715 + }, + { + "epoch": 0.12, + "learning_rate": 1.9577931915455827e-05, + "loss": 0.9741, + "step": 716 + }, + { + "epoch": 0.12, + "learning_rate": 1.9576369707825962e-05, + "loss": 0.9541, + "step": 717 + }, + { + "epoch": 0.12, + "learning_rate": 1.9574804676982215e-05, + "loss": 0.9528, + "step": 718 + }, + { + "epoch": 0.12, + "learning_rate": 1.957323682338598e-05, + "loss": 0.9474, + "step": 719 + }, + { + "epoch": 0.12, + "learning_rate": 1.957166614749947e-05, + "loss": 0.9423, + "step": 720 + }, + { + "epoch": 0.12, + "learning_rate": 1.9570092649785746e-05, + "loss": 0.9924, + "step": 721 + }, + { + "epoch": 0.12, + "learning_rate": 1.956851633070868e-05, + "loss": 0.9478, + "step": 722 + }, + { + "epoch": 0.12, + "learning_rate": 1.9566937190732994e-05, + "loss": 1.0685, + "step": 723 + }, + { + "epoch": 0.12, + "learning_rate": 1.9565355230324238e-05, + "loss": 1.0454, + "step": 724 + }, + { + "epoch": 0.12, + "learning_rate": 1.9563770449948782e-05, + "loss": 0.3429, + "step": 725 + }, + { + "epoch": 0.12, + "learning_rate": 1.9562182850073844e-05, + "loss": 1.0326, + "step": 726 + }, + { + "epoch": 0.12, + "learning_rate": 1.9560592431167456e-05, + "loss": 0.9642, + "step": 727 + }, + { + "epoch": 0.12, + "learning_rate": 1.95589991936985e-05, + "loss": 0.3438, + "step": 728 + }, + { + "epoch": 0.12, + "learning_rate": 1.9557403138136672e-05, + "loss": 0.9815, + "step": 729 + }, + { + "epoch": 0.12, + "learning_rate": 1.955580426495251e-05, + "loss": 0.975, + "step": 730 + }, + { + "epoch": 0.12, + "learning_rate": 1.9554202574617375e-05, + "loss": 0.9863, + "step": 731 + }, + { + "epoch": 0.12, + "learning_rate": 
1.9552598067603466e-05, + "loss": 1.0261, + "step": 732 + }, + { + "epoch": 0.12, + "learning_rate": 1.9550990744383805e-05, + "loss": 0.9819, + "step": 733 + }, + { + "epoch": 0.12, + "learning_rate": 1.954938060543225e-05, + "loss": 0.9842, + "step": 734 + }, + { + "epoch": 0.12, + "learning_rate": 1.9547767651223486e-05, + "loss": 1.0504, + "step": 735 + }, + { + "epoch": 0.12, + "learning_rate": 1.954615188223303e-05, + "loss": 0.9912, + "step": 736 + }, + { + "epoch": 0.12, + "learning_rate": 1.9544533298937222e-05, + "loss": 0.9793, + "step": 737 + }, + { + "epoch": 0.12, + "learning_rate": 1.9542911901813247e-05, + "loss": 0.3382, + "step": 738 + }, + { + "epoch": 0.12, + "learning_rate": 1.95412876913391e-05, + "loss": 1.0173, + "step": 739 + }, + { + "epoch": 0.12, + "learning_rate": 1.953966066799362e-05, + "loss": 1.0028, + "step": 740 + }, + { + "epoch": 0.12, + "learning_rate": 1.9538030832256468e-05, + "loss": 0.9548, + "step": 741 + }, + { + "epoch": 0.12, + "learning_rate": 1.9536398184608143e-05, + "loss": 1.0263, + "step": 742 + }, + { + "epoch": 0.12, + "learning_rate": 1.953476272552996e-05, + "loss": 1.0099, + "step": 743 + }, + { + "epoch": 0.12, + "learning_rate": 1.9533124455504073e-05, + "loss": 0.994, + "step": 744 + }, + { + "epoch": 0.12, + "learning_rate": 1.9531483375013457e-05, + "loss": 1.0214, + "step": 745 + }, + { + "epoch": 0.13, + "learning_rate": 1.9529839484541925e-05, + "loss": 0.3046, + "step": 746 + }, + { + "epoch": 0.13, + "learning_rate": 1.952819278457411e-05, + "loss": 0.9628, + "step": 747 + }, + { + "epoch": 0.13, + "learning_rate": 1.9526543275595476e-05, + "loss": 1.0409, + "step": 748 + }, + { + "epoch": 0.13, + "learning_rate": 1.952489095809232e-05, + "loss": 0.9826, + "step": 749 + }, + { + "epoch": 0.13, + "learning_rate": 1.9523235832551752e-05, + "loss": 0.3528, + "step": 750 + }, + { + "epoch": 0.13, + "learning_rate": 1.9521577899461732e-05, + "loss": 1.0095, + "step": 751 + }, + { + "epoch": 0.13, + "learning_rate": 1.951991715931103e-05, + "loss": 1.0256, + "step": 752 + }, + { + "epoch": 0.13, + "learning_rate": 1.951825361258925e-05, + "loss": 0.9709, + "step": 753 + }, + { + "epoch": 0.13, + "learning_rate": 1.9516587259786824e-05, + "loss": 1.0211, + "step": 754 + }, + { + "epoch": 0.13, + "learning_rate": 1.9514918101395012e-05, + "loss": 1.0976, + "step": 755 + }, + { + "epoch": 0.13, + "learning_rate": 1.9513246137905896e-05, + "loss": 1.0101, + "step": 756 + }, + { + "epoch": 0.13, + "learning_rate": 1.9511571369812388e-05, + "loss": 1.0113, + "step": 757 + }, + { + "epoch": 0.13, + "learning_rate": 1.9509893797608228e-05, + "loss": 0.9947, + "step": 758 + }, + { + "epoch": 0.13, + "learning_rate": 1.9508213421787987e-05, + "loss": 0.9578, + "step": 759 + }, + { + "epoch": 0.13, + "learning_rate": 1.950653024284705e-05, + "loss": 1.0217, + "step": 760 + }, + { + "epoch": 0.13, + "learning_rate": 1.9504844261281638e-05, + "loss": 0.3692, + "step": 761 + }, + { + "epoch": 0.13, + "learning_rate": 1.9503155477588794e-05, + "loss": 0.9471, + "step": 762 + }, + { + "epoch": 0.13, + "learning_rate": 1.9501463892266396e-05, + "loss": 1.0062, + "step": 763 + }, + { + "epoch": 0.13, + "learning_rate": 1.949976950581314e-05, + "loss": 0.972, + "step": 764 + }, + { + "epoch": 0.13, + "learning_rate": 1.949807231872854e-05, + "loss": 0.9498, + "step": 765 + }, + { + "epoch": 0.13, + "learning_rate": 1.949637233151295e-05, + "loss": 0.9654, + "step": 766 + }, + { + "epoch": 0.13, + "learning_rate": 1.9494669544667545e-05, + "loss": 
0.337, + "step": 767 + }, + { + "epoch": 0.13, + "learning_rate": 1.9492963958694326e-05, + "loss": 0.9569, + "step": 768 + }, + { + "epoch": 0.13, + "learning_rate": 1.949125557409611e-05, + "loss": 0.9291, + "step": 769 + }, + { + "epoch": 0.13, + "learning_rate": 1.9489544391376555e-05, + "loss": 1.008, + "step": 770 + }, + { + "epoch": 0.13, + "learning_rate": 1.948783041104013e-05, + "loss": 1.0072, + "step": 771 + }, + { + "epoch": 0.13, + "learning_rate": 1.9486113633592136e-05, + "loss": 1.0161, + "step": 772 + }, + { + "epoch": 0.13, + "learning_rate": 1.94843940595387e-05, + "loss": 0.9737, + "step": 773 + }, + { + "epoch": 0.13, + "learning_rate": 1.9482671689386766e-05, + "loss": 1.0253, + "step": 774 + }, + { + "epoch": 0.13, + "learning_rate": 1.948094652364411e-05, + "loss": 0.9087, + "step": 775 + }, + { + "epoch": 0.13, + "learning_rate": 1.9479218562819326e-05, + "loss": 1.0134, + "step": 776 + }, + { + "epoch": 0.13, + "learning_rate": 1.9477487807421837e-05, + "loss": 1.0301, + "step": 777 + }, + { + "epoch": 0.13, + "learning_rate": 1.9475754257961887e-05, + "loss": 1.0361, + "step": 778 + }, + { + "epoch": 0.13, + "learning_rate": 1.9474017914950546e-05, + "loss": 0.3615, + "step": 779 + }, + { + "epoch": 0.13, + "learning_rate": 1.9472278778899704e-05, + "loss": 1.0714, + "step": 780 + }, + { + "epoch": 0.13, + "learning_rate": 1.9470536850322076e-05, + "loss": 0.9518, + "step": 781 + }, + { + "epoch": 0.13, + "learning_rate": 1.94687921297312e-05, + "loss": 0.9659, + "step": 782 + }, + { + "epoch": 0.13, + "learning_rate": 1.9467044617641445e-05, + "loss": 0.9475, + "step": 783 + }, + { + "epoch": 0.13, + "learning_rate": 1.9465294314567987e-05, + "loss": 1.0415, + "step": 784 + }, + { + "epoch": 0.13, + "learning_rate": 1.946354122102684e-05, + "loss": 0.966, + "step": 785 + }, + { + "epoch": 0.13, + "learning_rate": 1.946178533753483e-05, + "loss": 0.9592, + "step": 786 + }, + { + "epoch": 0.13, + "learning_rate": 1.946002666460961e-05, + "loss": 0.9455, + "step": 787 + }, + { + "epoch": 0.13, + "learning_rate": 1.9458265202769656e-05, + "loss": 0.9784, + "step": 788 + }, + { + "epoch": 0.13, + "learning_rate": 1.945650095253427e-05, + "loss": 1.0447, + "step": 789 + }, + { + "epoch": 0.13, + "learning_rate": 1.945473391442356e-05, + "loss": 0.9969, + "step": 790 + }, + { + "epoch": 0.13, + "learning_rate": 1.9452964088958483e-05, + "loss": 0.9347, + "step": 791 + }, + { + "epoch": 0.13, + "learning_rate": 1.945119147666079e-05, + "loss": 0.9767, + "step": 792 + }, + { + "epoch": 0.13, + "learning_rate": 1.9449416078053067e-05, + "loss": 1.0086, + "step": 793 + }, + { + "epoch": 0.13, + "learning_rate": 1.9447637893658727e-05, + "loss": 0.9777, + "step": 794 + }, + { + "epoch": 0.13, + "learning_rate": 1.944585692400199e-05, + "loss": 1.009, + "step": 795 + }, + { + "epoch": 0.13, + "learning_rate": 1.9444073169607907e-05, + "loss": 1.0068, + "step": 796 + }, + { + "epoch": 0.13, + "learning_rate": 1.9442286631002348e-05, + "loss": 0.9806, + "step": 797 + }, + { + "epoch": 0.13, + "learning_rate": 1.9440497308712e-05, + "loss": 0.9913, + "step": 798 + }, + { + "epoch": 0.13, + "learning_rate": 1.943870520326438e-05, + "loss": 1.0063, + "step": 799 + }, + { + "epoch": 0.13, + "learning_rate": 1.9436910315187815e-05, + "loss": 1.0104, + "step": 800 + }, + { + "epoch": 0.13, + "learning_rate": 1.9435112645011462e-05, + "loss": 0.9572, + "step": 801 + }, + { + "epoch": 0.13, + "learning_rate": 1.943331219326528e-05, + "loss": 1.0414, + "step": 802 + }, + { + "epoch": 
0.13, + "learning_rate": 1.9431508960480075e-05, + "loss": 1.0178, + "step": 803 + }, + { + "epoch": 0.13, + "learning_rate": 1.9429702947187455e-05, + "loss": 1.0223, + "step": 804 + }, + { + "epoch": 0.13, + "learning_rate": 1.942789415391985e-05, + "loss": 0.9504, + "step": 805 + }, + { + "epoch": 0.14, + "learning_rate": 1.942608258121051e-05, + "loss": 1.0596, + "step": 806 + }, + { + "epoch": 0.14, + "learning_rate": 1.9424268229593507e-05, + "loss": 0.9984, + "step": 807 + }, + { + "epoch": 0.14, + "learning_rate": 1.942245109960373e-05, + "loss": 0.9768, + "step": 808 + }, + { + "epoch": 0.14, + "learning_rate": 1.9420631191776892e-05, + "loss": 0.9916, + "step": 809 + }, + { + "epoch": 0.14, + "learning_rate": 1.9418808506649515e-05, + "loss": 1.0227, + "step": 810 + }, + { + "epoch": 0.14, + "learning_rate": 1.941698304475895e-05, + "loss": 1.0428, + "step": 811 + }, + { + "epoch": 0.14, + "learning_rate": 1.9415154806643358e-05, + "loss": 1.0171, + "step": 812 + }, + { + "epoch": 0.14, + "learning_rate": 1.9413323792841726e-05, + "loss": 0.9998, + "step": 813 + }, + { + "epoch": 0.14, + "learning_rate": 1.941149000389386e-05, + "loss": 0.9847, + "step": 814 + }, + { + "epoch": 0.14, + "learning_rate": 1.940965344034037e-05, + "loss": 1.0219, + "step": 815 + }, + { + "epoch": 0.14, + "learning_rate": 1.94078141027227e-05, + "loss": 0.9924, + "step": 816 + }, + { + "epoch": 0.14, + "learning_rate": 1.940597199158311e-05, + "loss": 0.9577, + "step": 817 + }, + { + "epoch": 0.14, + "learning_rate": 1.9404127107464662e-05, + "loss": 1.0226, + "step": 818 + }, + { + "epoch": 0.14, + "learning_rate": 1.9402279450911255e-05, + "loss": 0.9443, + "step": 819 + }, + { + "epoch": 0.14, + "learning_rate": 1.94004290224676e-05, + "loss": 1.0664, + "step": 820 + }, + { + "epoch": 0.14, + "learning_rate": 1.939857582267922e-05, + "loss": 1.0515, + "step": 821 + }, + { + "epoch": 0.14, + "learning_rate": 1.9396719852092457e-05, + "loss": 1.0169, + "step": 822 + }, + { + "epoch": 0.14, + "learning_rate": 1.9394861111254467e-05, + "loss": 1.018, + "step": 823 + }, + { + "epoch": 0.14, + "learning_rate": 1.939299960071323e-05, + "loss": 0.9474, + "step": 824 + }, + { + "epoch": 0.14, + "learning_rate": 1.9391135321017542e-05, + "loss": 0.9505, + "step": 825 + }, + { + "epoch": 0.14, + "learning_rate": 1.938926827271701e-05, + "loss": 1.035, + "step": 826 + }, + { + "epoch": 0.14, + "learning_rate": 1.938739845636205e-05, + "loss": 0.9702, + "step": 827 + }, + { + "epoch": 0.14, + "learning_rate": 1.938552587250392e-05, + "loss": 1.0072, + "step": 828 + }, + { + "epoch": 0.14, + "learning_rate": 1.9383650521694662e-05, + "loss": 1.0857, + "step": 829 + }, + { + "epoch": 0.14, + "learning_rate": 1.938177240448716e-05, + "loss": 0.9613, + "step": 830 + }, + { + "epoch": 0.14, + "learning_rate": 1.9379891521435098e-05, + "loss": 1.0067, + "step": 831 + }, + { + "epoch": 0.14, + "learning_rate": 1.9378007873092975e-05, + "loss": 1.0064, + "step": 832 + }, + { + "epoch": 0.14, + "learning_rate": 1.9376121460016123e-05, + "loss": 1.0331, + "step": 833 + }, + { + "epoch": 0.14, + "learning_rate": 1.9374232282760664e-05, + "loss": 0.994, + "step": 834 + }, + { + "epoch": 0.14, + "learning_rate": 1.937234034188355e-05, + "loss": 1.0052, + "step": 835 + }, + { + "epoch": 0.14, + "learning_rate": 1.9370445637942552e-05, + "loss": 1.0053, + "step": 836 + }, + { + "epoch": 0.14, + "learning_rate": 1.9368548171496244e-05, + "loss": 0.9884, + "step": 837 + }, + { + "epoch": 0.14, + "learning_rate": 
1.9366647943104014e-05, + "loss": 0.9563, + "step": 838 + }, + { + "epoch": 0.14, + "learning_rate": 1.9364744953326077e-05, + "loss": 0.9919, + "step": 839 + }, + { + "epoch": 0.14, + "learning_rate": 1.936283920272345e-05, + "loss": 1.0225, + "step": 840 + }, + { + "epoch": 0.14, + "learning_rate": 1.9360930691857966e-05, + "loss": 0.9591, + "step": 841 + }, + { + "epoch": 0.14, + "learning_rate": 1.935901942129228e-05, + "loss": 0.9704, + "step": 842 + }, + { + "epoch": 0.14, + "learning_rate": 1.935710539158985e-05, + "loss": 1.0419, + "step": 843 + }, + { + "epoch": 0.14, + "learning_rate": 1.9355188603314956e-05, + "loss": 0.995, + "step": 844 + }, + { + "epoch": 0.14, + "learning_rate": 1.935326905703268e-05, + "loss": 1.0158, + "step": 845 + }, + { + "epoch": 0.14, + "learning_rate": 1.9351346753308933e-05, + "loss": 0.9818, + "step": 846 + }, + { + "epoch": 0.14, + "learning_rate": 1.9349421692710428e-05, + "loss": 0.973, + "step": 847 + }, + { + "epoch": 0.14, + "learning_rate": 1.9347493875804686e-05, + "loss": 1.0533, + "step": 848 + }, + { + "epoch": 0.14, + "learning_rate": 1.9345563303160056e-05, + "loss": 0.9813, + "step": 849 + }, + { + "epoch": 0.14, + "learning_rate": 1.9343629975345687e-05, + "loss": 1.0012, + "step": 850 + }, + { + "epoch": 0.14, + "learning_rate": 1.9341693892931544e-05, + "loss": 0.9952, + "step": 851 + }, + { + "epoch": 0.14, + "learning_rate": 1.9339755056488405e-05, + "loss": 0.979, + "step": 852 + }, + { + "epoch": 0.14, + "learning_rate": 1.9337813466587864e-05, + "loss": 0.9911, + "step": 853 + }, + { + "epoch": 0.14, + "learning_rate": 1.9335869123802313e-05, + "loss": 0.9049, + "step": 854 + }, + { + "epoch": 0.14, + "learning_rate": 1.933392202870497e-05, + "loss": 0.9446, + "step": 855 + }, + { + "epoch": 0.14, + "learning_rate": 1.9331972181869857e-05, + "loss": 0.9499, + "step": 856 + }, + { + "epoch": 0.14, + "learning_rate": 1.9330019583871813e-05, + "loss": 0.3065, + "step": 857 + }, + { + "epoch": 0.14, + "learning_rate": 1.932806423528648e-05, + "loss": 1.0105, + "step": 858 + }, + { + "epoch": 0.14, + "learning_rate": 1.9326106136690322e-05, + "loss": 1.0056, + "step": 859 + }, + { + "epoch": 0.14, + "learning_rate": 1.9324145288660595e-05, + "loss": 0.9649, + "step": 860 + }, + { + "epoch": 0.14, + "learning_rate": 1.9322181691775387e-05, + "loss": 0.9757, + "step": 861 + }, + { + "epoch": 0.14, + "learning_rate": 1.9320215346613586e-05, + "loss": 0.9648, + "step": 862 + }, + { + "epoch": 0.14, + "learning_rate": 1.9318246253754886e-05, + "loss": 0.939, + "step": 863 + }, + { + "epoch": 0.14, + "learning_rate": 1.93162744137798e-05, + "loss": 0.9588, + "step": 864 + }, + { + "epoch": 0.15, + "learning_rate": 1.931429982726965e-05, + "loss": 0.9781, + "step": 865 + }, + { + "epoch": 0.15, + "learning_rate": 1.9312322494806556e-05, + "loss": 0.9482, + "step": 866 + }, + { + "epoch": 0.15, + "learning_rate": 1.9310342416973468e-05, + "loss": 1.0235, + "step": 867 + }, + { + "epoch": 0.15, + "learning_rate": 1.930835959435412e-05, + "loss": 0.9763, + "step": 868 + }, + { + "epoch": 0.15, + "learning_rate": 1.9306374027533078e-05, + "loss": 1.0382, + "step": 869 + }, + { + "epoch": 0.15, + "learning_rate": 1.9304385717095708e-05, + "loss": 0.9759, + "step": 870 + }, + { + "epoch": 0.15, + "learning_rate": 1.930239466362818e-05, + "loss": 1.0406, + "step": 871 + }, + { + "epoch": 0.15, + "learning_rate": 1.9300400867717484e-05, + "loss": 0.9649, + "step": 872 + }, + { + "epoch": 0.15, + "learning_rate": 1.9298404329951404e-05, + "loss": 
0.8946, + "step": 873 + }, + { + "epoch": 0.15, + "learning_rate": 1.9296405050918546e-05, + "loss": 0.3279, + "step": 874 + }, + { + "epoch": 0.15, + "learning_rate": 1.9294403031208317e-05, + "loss": 1.0636, + "step": 875 + }, + { + "epoch": 0.15, + "learning_rate": 1.929239827141093e-05, + "loss": 1.0088, + "step": 876 + }, + { + "epoch": 0.15, + "learning_rate": 1.929039077211741e-05, + "loss": 0.9748, + "step": 877 + }, + { + "epoch": 0.15, + "learning_rate": 1.9288380533919597e-05, + "loss": 1.0252, + "step": 878 + }, + { + "epoch": 0.15, + "learning_rate": 1.928636755741012e-05, + "loss": 0.9938, + "step": 879 + }, + { + "epoch": 0.15, + "learning_rate": 1.928435184318243e-05, + "loss": 0.9625, + "step": 880 + }, + { + "epoch": 0.15, + "learning_rate": 1.9282333391830777e-05, + "loss": 0.336, + "step": 881 + }, + { + "epoch": 0.15, + "learning_rate": 1.9280312203950228e-05, + "loss": 0.9471, + "step": 882 + }, + { + "epoch": 0.15, + "learning_rate": 1.9278288280136647e-05, + "loss": 1.0034, + "step": 883 + }, + { + "epoch": 0.15, + "learning_rate": 1.927626162098671e-05, + "loss": 0.9751, + "step": 884 + }, + { + "epoch": 0.15, + "learning_rate": 1.9274232227097885e-05, + "loss": 1.0158, + "step": 885 + }, + { + "epoch": 0.15, + "learning_rate": 1.927220009906848e-05, + "loss": 0.9773, + "step": 886 + }, + { + "epoch": 0.15, + "learning_rate": 1.927016523749757e-05, + "loss": 0.3307, + "step": 887 + }, + { + "epoch": 0.15, + "learning_rate": 1.9268127642985058e-05, + "loss": 0.9597, + "step": 888 + }, + { + "epoch": 0.15, + "learning_rate": 1.9266087316131655e-05, + "loss": 0.945, + "step": 889 + }, + { + "epoch": 0.15, + "learning_rate": 1.9264044257538864e-05, + "loss": 1.0251, + "step": 890 + }, + { + "epoch": 0.15, + "learning_rate": 1.9261998467809e-05, + "loss": 0.9866, + "step": 891 + }, + { + "epoch": 0.15, + "learning_rate": 1.9259949947545186e-05, + "loss": 0.9298, + "step": 892 + }, + { + "epoch": 0.15, + "learning_rate": 1.925789869735134e-05, + "loss": 0.9571, + "step": 893 + }, + { + "epoch": 0.15, + "learning_rate": 1.9255844717832206e-05, + "loss": 0.9952, + "step": 894 + }, + { + "epoch": 0.15, + "learning_rate": 1.9253788009593308e-05, + "loss": 0.9588, + "step": 895 + }, + { + "epoch": 0.15, + "learning_rate": 1.9251728573240983e-05, + "loss": 1.0245, + "step": 896 + }, + { + "epoch": 0.15, + "learning_rate": 1.9249666409382387e-05, + "loss": 0.3245, + "step": 897 + }, + { + "epoch": 0.15, + "learning_rate": 1.9247601518625454e-05, + "loss": 1.0946, + "step": 898 + }, + { + "epoch": 0.15, + "learning_rate": 1.9245533901578943e-05, + "loss": 1.0348, + "step": 899 + }, + { + "epoch": 0.15, + "learning_rate": 1.9243463558852405e-05, + "loss": 1.022, + "step": 900 + }, + { + "epoch": 0.15, + "learning_rate": 1.92413904910562e-05, + "loss": 1.0234, + "step": 901 + }, + { + "epoch": 0.15, + "learning_rate": 1.9239314698801493e-05, + "loss": 0.9742, + "step": 902 + }, + { + "epoch": 0.15, + "learning_rate": 1.9237236182700244e-05, + "loss": 1.041, + "step": 903 + }, + { + "epoch": 0.15, + "learning_rate": 1.9235154943365224e-05, + "loss": 0.9639, + "step": 904 + }, + { + "epoch": 0.15, + "learning_rate": 1.9233070981410007e-05, + "loss": 0.9265, + "step": 905 + }, + { + "epoch": 0.15, + "learning_rate": 1.923098429744896e-05, + "loss": 0.996, + "step": 906 + }, + { + "epoch": 0.15, + "learning_rate": 1.9228894892097267e-05, + "loss": 0.3218, + "step": 907 + }, + { + "epoch": 0.15, + "learning_rate": 1.92268027659709e-05, + "loss": 0.9905, + "step": 908 + }, + { + 
"epoch": 0.15, + "learning_rate": 1.9224707919686648e-05, + "loss": 0.9914, + "step": 909 + }, + { + "epoch": 0.15, + "learning_rate": 1.922261035386208e-05, + "loss": 1.0077, + "step": 910 + }, + { + "epoch": 0.15, + "learning_rate": 1.9220510069115595e-05, + "loss": 0.9953, + "step": 911 + }, + { + "epoch": 0.15, + "learning_rate": 1.921840706606637e-05, + "loss": 0.9565, + "step": 912 + }, + { + "epoch": 0.15, + "learning_rate": 1.921630134533439e-05, + "loss": 1.0236, + "step": 913 + }, + { + "epoch": 0.15, + "learning_rate": 1.9214192907540452e-05, + "loss": 0.9392, + "step": 914 + }, + { + "epoch": 0.15, + "learning_rate": 1.9212081753306143e-05, + "loss": 0.9978, + "step": 915 + }, + { + "epoch": 0.15, + "learning_rate": 1.920996788325385e-05, + "loss": 0.3377, + "step": 916 + }, + { + "epoch": 0.15, + "learning_rate": 1.9207851298006766e-05, + "loss": 0.9754, + "step": 917 + }, + { + "epoch": 0.15, + "learning_rate": 1.920573199818888e-05, + "loss": 0.9965, + "step": 918 + }, + { + "epoch": 0.15, + "learning_rate": 1.9203609984424993e-05, + "loss": 1.0739, + "step": 919 + }, + { + "epoch": 0.15, + "learning_rate": 1.9201485257340683e-05, + "loss": 1.017, + "step": 920 + }, + { + "epoch": 0.15, + "learning_rate": 1.9199357817562347e-05, + "loss": 0.9816, + "step": 921 + }, + { + "epoch": 0.15, + "learning_rate": 1.9197227665717183e-05, + "loss": 0.9237, + "step": 922 + }, + { + "epoch": 0.15, + "learning_rate": 1.9195094802433175e-05, + "loss": 0.9959, + "step": 923 + }, + { + "epoch": 0.15, + "learning_rate": 1.9192959228339115e-05, + "loss": 0.9705, + "step": 924 + }, + { + "epoch": 0.16, + "learning_rate": 1.9190820944064594e-05, + "loss": 0.9917, + "step": 925 + }, + { + "epoch": 0.16, + "learning_rate": 1.918867995024e-05, + "loss": 0.9613, + "step": 926 + }, + { + "epoch": 0.16, + "learning_rate": 1.918653624749652e-05, + "loss": 1.0215, + "step": 927 + }, + { + "epoch": 0.16, + "learning_rate": 1.918438983646614e-05, + "loss": 1.0479, + "step": 928 + }, + { + "epoch": 0.16, + "learning_rate": 1.9182240717781642e-05, + "loss": 1.0077, + "step": 929 + }, + { + "epoch": 0.16, + "learning_rate": 1.918008889207661e-05, + "loss": 0.9867, + "step": 930 + }, + { + "epoch": 0.16, + "learning_rate": 1.917793435998543e-05, + "loss": 1.016, + "step": 931 + }, + { + "epoch": 0.16, + "learning_rate": 1.9175777122143273e-05, + "loss": 0.9152, + "step": 932 + }, + { + "epoch": 0.16, + "learning_rate": 1.917361717918612e-05, + "loss": 0.9694, + "step": 933 + }, + { + "epoch": 0.16, + "learning_rate": 1.9171454531750745e-05, + "loss": 0.99, + "step": 934 + }, + { + "epoch": 0.16, + "learning_rate": 1.9169289180474714e-05, + "loss": 0.9906, + "step": 935 + }, + { + "epoch": 0.16, + "learning_rate": 1.91671211259964e-05, + "loss": 0.9719, + "step": 936 + }, + { + "epoch": 0.16, + "learning_rate": 1.9164950368954965e-05, + "loss": 0.9791, + "step": 937 + }, + { + "epoch": 0.16, + "learning_rate": 1.9162776909990375e-05, + "loss": 0.9445, + "step": 938 + }, + { + "epoch": 0.16, + "learning_rate": 1.9160600749743384e-05, + "loss": 0.9866, + "step": 939 + }, + { + "epoch": 0.16, + "learning_rate": 1.9158421888855548e-05, + "loss": 0.9965, + "step": 940 + }, + { + "epoch": 0.16, + "learning_rate": 1.9156240327969223e-05, + "loss": 0.9864, + "step": 941 + }, + { + "epoch": 0.16, + "learning_rate": 1.9154056067727548e-05, + "loss": 0.3406, + "step": 942 + }, + { + "epoch": 0.16, + "learning_rate": 1.915186910877447e-05, + "loss": 1.0603, + "step": 943 + }, + { + "epoch": 0.16, + "learning_rate": 
1.9149679451754726e-05, + "loss": 0.9508, + "step": 944 + }, + { + "epoch": 0.16, + "learning_rate": 1.9147487097313854e-05, + "loss": 0.9682, + "step": 945 + }, + { + "epoch": 0.16, + "learning_rate": 1.9145292046098175e-05, + "loss": 1.0107, + "step": 946 + }, + { + "epoch": 0.16, + "learning_rate": 1.914309429875482e-05, + "loss": 0.9738, + "step": 947 + }, + { + "epoch": 0.16, + "learning_rate": 1.9140893855931705e-05, + "loss": 0.9991, + "step": 948 + }, + { + "epoch": 0.16, + "learning_rate": 1.9138690718277542e-05, + "loss": 0.9743, + "step": 949 + }, + { + "epoch": 0.16, + "learning_rate": 1.913648488644184e-05, + "loss": 0.9799, + "step": 950 + }, + { + "epoch": 0.16, + "learning_rate": 1.9134276361074907e-05, + "loss": 0.9338, + "step": 951 + }, + { + "epoch": 0.16, + "learning_rate": 1.9132065142827834e-05, + "loss": 0.9534, + "step": 952 + }, + { + "epoch": 0.16, + "learning_rate": 1.912985123235251e-05, + "loss": 1.0146, + "step": 953 + }, + { + "epoch": 0.16, + "learning_rate": 1.912763463030162e-05, + "loss": 0.9317, + "step": 954 + }, + { + "epoch": 0.16, + "learning_rate": 1.9125415337328644e-05, + "loss": 1.0198, + "step": 955 + }, + { + "epoch": 0.16, + "learning_rate": 1.912319335408785e-05, + "loss": 1.0304, + "step": 956 + }, + { + "epoch": 0.16, + "learning_rate": 1.9120968681234303e-05, + "loss": 0.3646, + "step": 957 + }, + { + "epoch": 0.16, + "learning_rate": 1.9118741319423862e-05, + "loss": 0.951, + "step": 958 + }, + { + "epoch": 0.16, + "learning_rate": 1.9116511269313173e-05, + "loss": 0.9696, + "step": 959 + }, + { + "epoch": 0.16, + "learning_rate": 1.9114278531559677e-05, + "loss": 0.992, + "step": 960 + }, + { + "epoch": 0.16, + "learning_rate": 1.9112043106821612e-05, + "loss": 1.0155, + "step": 961 + }, + { + "epoch": 0.16, + "learning_rate": 1.9109804995758003e-05, + "loss": 1.0204, + "step": 962 + }, + { + "epoch": 0.16, + "learning_rate": 1.910756419902867e-05, + "loss": 1.0649, + "step": 963 + }, + { + "epoch": 0.16, + "learning_rate": 1.910532071729422e-05, + "loss": 1.0015, + "step": 964 + }, + { + "epoch": 0.16, + "learning_rate": 1.9103074551216058e-05, + "loss": 0.9717, + "step": 965 + }, + { + "epoch": 0.16, + "learning_rate": 1.9100825701456376e-05, + "loss": 0.9782, + "step": 966 + }, + { + "epoch": 0.16, + "learning_rate": 1.909857416867816e-05, + "loss": 1.0428, + "step": 967 + }, + { + "epoch": 0.16, + "learning_rate": 1.9096319953545186e-05, + "loss": 0.9706, + "step": 968 + }, + { + "epoch": 0.16, + "learning_rate": 1.9094063056722015e-05, + "loss": 1.0082, + "step": 969 + }, + { + "epoch": 0.16, + "learning_rate": 1.9091803478874008e-05, + "loss": 1.0324, + "step": 970 + }, + { + "epoch": 0.16, + "learning_rate": 1.9089541220667312e-05, + "loss": 1.0048, + "step": 971 + }, + { + "epoch": 0.16, + "learning_rate": 1.908727628276886e-05, + "loss": 0.9049, + "step": 972 + }, + { + "epoch": 0.16, + "learning_rate": 1.908500866584639e-05, + "loss": 0.9893, + "step": 973 + }, + { + "epoch": 0.16, + "learning_rate": 1.9082738370568407e-05, + "loss": 0.9912, + "step": 974 + }, + { + "epoch": 0.16, + "learning_rate": 1.908046539760422e-05, + "loss": 0.9915, + "step": 975 + }, + { + "epoch": 0.16, + "learning_rate": 1.9078189747623934e-05, + "loss": 0.9702, + "step": 976 + }, + { + "epoch": 0.16, + "learning_rate": 1.9075911421298425e-05, + "loss": 1.0209, + "step": 977 + }, + { + "epoch": 0.16, + "learning_rate": 1.9073630419299373e-05, + "loss": 0.9733, + "step": 978 + }, + { + "epoch": 0.16, + "learning_rate": 1.9071346742299238e-05, + 
"loss": 0.9528, + "step": 979 + }, + { + "epoch": 0.16, + "learning_rate": 1.9069060390971273e-05, + "loss": 0.9689, + "step": 980 + }, + { + "epoch": 0.16, + "learning_rate": 1.9066771365989515e-05, + "loss": 0.3441, + "step": 981 + }, + { + "epoch": 0.16, + "learning_rate": 1.90644796680288e-05, + "loss": 1.0499, + "step": 982 + }, + { + "epoch": 0.16, + "learning_rate": 1.906218529776474e-05, + "loss": 1.0037, + "step": 983 + }, + { + "epoch": 0.16, + "learning_rate": 1.905988825587374e-05, + "loss": 1.0219, + "step": 984 + }, + { + "epoch": 0.17, + "learning_rate": 1.9057588543032988e-05, + "loss": 0.9733, + "step": 985 + }, + { + "epoch": 0.17, + "learning_rate": 1.9055286159920466e-05, + "loss": 0.3237, + "step": 986 + }, + { + "epoch": 0.17, + "learning_rate": 1.9052981107214947e-05, + "loss": 0.9124, + "step": 987 + }, + { + "epoch": 0.17, + "learning_rate": 1.9050673385595977e-05, + "loss": 0.927, + "step": 988 + }, + { + "epoch": 0.17, + "learning_rate": 1.9048362995743898e-05, + "loss": 0.9781, + "step": 989 + }, + { + "epoch": 0.17, + "learning_rate": 1.9046049938339837e-05, + "loss": 0.9409, + "step": 990 + }, + { + "epoch": 0.17, + "learning_rate": 1.904373421406571e-05, + "loss": 0.9933, + "step": 991 + }, + { + "epoch": 0.17, + "learning_rate": 1.9041415823604214e-05, + "loss": 1.0016, + "step": 992 + }, + { + "epoch": 0.17, + "learning_rate": 1.9039094767638834e-05, + "loss": 0.959, + "step": 993 + }, + { + "epoch": 0.17, + "learning_rate": 1.9036771046853845e-05, + "loss": 0.99, + "step": 994 + }, + { + "epoch": 0.17, + "learning_rate": 1.9034444661934302e-05, + "loss": 0.965, + "step": 995 + }, + { + "epoch": 0.17, + "learning_rate": 1.9032115613566047e-05, + "loss": 0.3551, + "step": 996 + }, + { + "epoch": 0.17, + "learning_rate": 1.902978390243571e-05, + "loss": 0.9828, + "step": 997 + }, + { + "epoch": 0.17, + "learning_rate": 1.9027449529230703e-05, + "loss": 0.9484, + "step": 998 + }, + { + "epoch": 0.17, + "learning_rate": 1.902511249463922e-05, + "loss": 0.9768, + "step": 999 + }, + { + "epoch": 0.17, + "learning_rate": 1.9022772799350248e-05, + "loss": 0.9765, + "step": 1000 + }, + { + "epoch": 0.17, + "learning_rate": 1.9020430444053554e-05, + "loss": 0.9316, + "step": 1001 + }, + { + "epoch": 0.17, + "learning_rate": 1.9018085429439683e-05, + "loss": 0.9784, + "step": 1002 + }, + { + "epoch": 0.17, + "learning_rate": 1.9015737756199976e-05, + "loss": 0.9783, + "step": 1003 + }, + { + "epoch": 0.17, + "learning_rate": 1.901338742502655e-05, + "loss": 0.9726, + "step": 1004 + }, + { + "epoch": 0.17, + "learning_rate": 1.9011034436612305e-05, + "loss": 0.948, + "step": 1005 + }, + { + "epoch": 0.17, + "learning_rate": 1.9008678791650927e-05, + "loss": 0.9689, + "step": 1006 + }, + { + "epoch": 0.17, + "learning_rate": 1.9006320490836886e-05, + "loss": 0.9365, + "step": 1007 + }, + { + "epoch": 0.17, + "learning_rate": 1.9003959534865437e-05, + "loss": 0.9481, + "step": 1008 + }, + { + "epoch": 0.17, + "learning_rate": 1.9001595924432606e-05, + "loss": 0.9822, + "step": 1009 + }, + { + "epoch": 0.17, + "learning_rate": 1.899922966023522e-05, + "loss": 0.9999, + "step": 1010 + }, + { + "epoch": 0.17, + "learning_rate": 1.8996860742970872e-05, + "loss": 1.024, + "step": 1011 + }, + { + "epoch": 0.17, + "learning_rate": 1.8994489173337943e-05, + "loss": 0.9749, + "step": 1012 + }, + { + "epoch": 0.17, + "learning_rate": 1.8992114952035602e-05, + "loss": 0.9818, + "step": 1013 + }, + { + "epoch": 0.17, + "learning_rate": 1.8989738079763788e-05, + "loss": 1.0473, + 
"step": 1014 + }, + { + "epoch": 0.17, + "learning_rate": 1.8987358557223232e-05, + "loss": 0.9532, + "step": 1015 + }, + { + "epoch": 0.17, + "learning_rate": 1.898497638511544e-05, + "loss": 1.0084, + "step": 1016 + }, + { + "epoch": 0.17, + "learning_rate": 1.8982591564142702e-05, + "loss": 1.0289, + "step": 1017 + }, + { + "epoch": 0.17, + "learning_rate": 1.8980204095008087e-05, + "loss": 0.9786, + "step": 1018 + }, + { + "epoch": 0.17, + "learning_rate": 1.897781397841545e-05, + "loss": 0.9731, + "step": 1019 + }, + { + "epoch": 0.17, + "learning_rate": 1.8975421215069416e-05, + "loss": 0.9803, + "step": 1020 + }, + { + "epoch": 0.17, + "learning_rate": 1.8973025805675403e-05, + "loss": 0.9652, + "step": 1021 + }, + { + "epoch": 0.17, + "learning_rate": 1.8970627750939595e-05, + "loss": 1.0009, + "step": 1022 + }, + { + "epoch": 0.17, + "learning_rate": 1.896822705156897e-05, + "loss": 0.9512, + "step": 1023 + }, + { + "epoch": 0.17, + "learning_rate": 1.896582370827128e-05, + "loss": 0.9756, + "step": 1024 + }, + { + "epoch": 0.17, + "learning_rate": 1.8963417721755052e-05, + "loss": 1.0012, + "step": 1025 + }, + { + "epoch": 0.17, + "learning_rate": 1.8961009092729598e-05, + "loss": 1.0186, + "step": 1026 + }, + { + "epoch": 0.17, + "learning_rate": 1.8958597821905004e-05, + "loss": 0.9967, + "step": 1027 + }, + { + "epoch": 0.17, + "learning_rate": 1.8956183909992144e-05, + "loss": 0.9859, + "step": 1028 + }, + { + "epoch": 0.17, + "learning_rate": 1.8953767357702655e-05, + "loss": 0.3478, + "step": 1029 + }, + { + "epoch": 0.17, + "learning_rate": 1.8951348165748973e-05, + "loss": 0.9693, + "step": 1030 + }, + { + "epoch": 0.17, + "learning_rate": 1.894892633484429e-05, + "loss": 1.0145, + "step": 1031 + }, + { + "epoch": 0.17, + "learning_rate": 1.89465018657026e-05, + "loss": 0.9428, + "step": 1032 + }, + { + "epoch": 0.17, + "learning_rate": 1.8944074759038648e-05, + "loss": 1.0153, + "step": 1033 + }, + { + "epoch": 0.17, + "learning_rate": 1.894164501556798e-05, + "loss": 1.0287, + "step": 1034 + }, + { + "epoch": 0.17, + "learning_rate": 1.8939212636006902e-05, + "loss": 0.9632, + "step": 1035 + }, + { + "epoch": 0.17, + "learning_rate": 1.8936777621072514e-05, + "loss": 0.953, + "step": 1036 + }, + { + "epoch": 0.17, + "learning_rate": 1.8934339971482676e-05, + "loss": 0.9859, + "step": 1037 + }, + { + "epoch": 0.17, + "learning_rate": 1.8931899687956038e-05, + "loss": 0.9483, + "step": 1038 + }, + { + "epoch": 0.17, + "learning_rate": 1.892945677121202e-05, + "loss": 1.0184, + "step": 1039 + }, + { + "epoch": 0.17, + "learning_rate": 1.892701122197082e-05, + "loss": 0.9609, + "step": 1040 + }, + { + "epoch": 0.17, + "learning_rate": 1.8924563040953403e-05, + "loss": 1.0094, + "step": 1041 + }, + { + "epoch": 0.17, + "learning_rate": 1.8922112228881532e-05, + "loss": 0.9793, + "step": 1042 + }, + { + "epoch": 0.17, + "learning_rate": 1.891965878647772e-05, + "loss": 0.9323, + "step": 1043 + }, + { + "epoch": 0.18, + "learning_rate": 1.8917202714465275e-05, + "loss": 0.9414, + "step": 1044 + }, + { + "epoch": 0.18, + "learning_rate": 1.8914744013568273e-05, + "loss": 1.0091, + "step": 1045 + }, + { + "epoch": 0.18, + "learning_rate": 1.891228268451156e-05, + "loss": 1.0217, + "step": 1046 + }, + { + "epoch": 0.18, + "learning_rate": 1.8909818728020765e-05, + "loss": 1.0197, + "step": 1047 + }, + { + "epoch": 0.18, + "learning_rate": 1.8907352144822285e-05, + "loss": 0.9881, + "step": 1048 + }, + { + "epoch": 0.18, + "learning_rate": 1.8904882935643293e-05, + "loss": 0.9708, 
+ "step": 1049 + }, + { + "epoch": 0.18, + "learning_rate": 1.8902411101211747e-05, + "loss": 0.3314, + "step": 1050 + }, + { + "epoch": 0.18, + "learning_rate": 1.889993664225636e-05, + "loss": 0.9396, + "step": 1051 + }, + { + "epoch": 0.18, + "learning_rate": 1.8897459559506632e-05, + "loss": 0.9854, + "step": 1052 + }, + { + "epoch": 0.18, + "learning_rate": 1.8894979853692836e-05, + "loss": 0.9372, + "step": 1053 + }, + { + "epoch": 0.18, + "learning_rate": 1.889249752554601e-05, + "loss": 1.0329, + "step": 1054 + }, + { + "epoch": 0.18, + "learning_rate": 1.889001257579797e-05, + "loss": 0.9695, + "step": 1055 + }, + { + "epoch": 0.18, + "learning_rate": 1.888752500518131e-05, + "loss": 0.9402, + "step": 1056 + }, + { + "epoch": 0.18, + "learning_rate": 1.888503481442939e-05, + "loss": 0.9693, + "step": 1057 + }, + { + "epoch": 0.18, + "learning_rate": 1.8882542004276343e-05, + "loss": 0.3051, + "step": 1058 + }, + { + "epoch": 0.18, + "learning_rate": 1.8880046575457072e-05, + "loss": 1.0506, + "step": 1059 + }, + { + "epoch": 0.18, + "learning_rate": 1.8877548528707267e-05, + "loss": 0.9983, + "step": 1060 + }, + { + "epoch": 0.18, + "learning_rate": 1.8875047864763366e-05, + "loss": 1.0086, + "step": 1061 + }, + { + "epoch": 0.18, + "learning_rate": 1.88725445843626e-05, + "loss": 0.9607, + "step": 1062 + }, + { + "epoch": 0.18, + "learning_rate": 1.887003868824295e-05, + "loss": 0.9428, + "step": 1063 + }, + { + "epoch": 0.18, + "learning_rate": 1.8867530177143192e-05, + "loss": 0.9446, + "step": 1064 + }, + { + "epoch": 0.18, + "learning_rate": 1.886501905180286e-05, + "loss": 0.9358, + "step": 1065 + }, + { + "epoch": 0.18, + "learning_rate": 1.8862505312962257e-05, + "loss": 0.9965, + "step": 1066 + }, + { + "epoch": 0.18, + "learning_rate": 1.8859988961362455e-05, + "loss": 0.9442, + "step": 1067 + }, + { + "epoch": 0.18, + "learning_rate": 1.885746999774531e-05, + "loss": 0.9833, + "step": 1068 + }, + { + "epoch": 0.18, + "learning_rate": 1.8854948422853436e-05, + "loss": 0.9434, + "step": 1069 + }, + { + "epoch": 0.18, + "learning_rate": 1.8852424237430215e-05, + "loss": 0.3524, + "step": 1070 + }, + { + "epoch": 0.18, + "learning_rate": 1.884989744221981e-05, + "loss": 0.9363, + "step": 1071 + }, + { + "epoch": 0.18, + "learning_rate": 1.8847368037967138e-05, + "loss": 0.9963, + "step": 1072 + }, + { + "epoch": 0.18, + "learning_rate": 1.8844836025417905e-05, + "loss": 1.0056, + "step": 1073 + }, + { + "epoch": 0.18, + "learning_rate": 1.8842301405318567e-05, + "loss": 0.9756, + "step": 1074 + }, + { + "epoch": 0.18, + "learning_rate": 1.8839764178416354e-05, + "loss": 0.9927, + "step": 1075 + }, + { + "epoch": 0.18, + "learning_rate": 1.8837224345459276e-05, + "loss": 1.0286, + "step": 1076 + }, + { + "epoch": 0.18, + "learning_rate": 1.8834681907196094e-05, + "loss": 0.3742, + "step": 1077 + }, + { + "epoch": 0.18, + "learning_rate": 1.883213686437635e-05, + "loss": 0.9803, + "step": 1078 + }, + { + "epoch": 0.18, + "learning_rate": 1.882958921775035e-05, + "loss": 0.939, + "step": 1079 + }, + { + "epoch": 0.18, + "learning_rate": 1.8827038968069163e-05, + "loss": 0.9506, + "step": 1080 + }, + { + "epoch": 0.18, + "learning_rate": 1.8824486116084633e-05, + "loss": 0.9882, + "step": 1081 + }, + { + "epoch": 0.18, + "learning_rate": 1.8821930662549363e-05, + "loss": 0.9504, + "step": 1082 + }, + { + "epoch": 0.18, + "learning_rate": 1.881937260821673e-05, + "loss": 1.0322, + "step": 1083 + }, + { + "epoch": 0.18, + "learning_rate": 1.8816811953840877e-05, + "loss": 1.0129, 
+ "step": 1084 + }, + { + "epoch": 0.18, + "learning_rate": 1.881424870017671e-05, + "loss": 1.0303, + "step": 1085 + }, + { + "epoch": 0.18, + "learning_rate": 1.8811682847979902e-05, + "loss": 1.0114, + "step": 1086 + }, + { + "epoch": 0.18, + "learning_rate": 1.8809114398006895e-05, + "loss": 1.0081, + "step": 1087 + }, + { + "epoch": 0.18, + "learning_rate": 1.8806543351014893e-05, + "loss": 0.3205, + "step": 1088 + }, + { + "epoch": 0.18, + "learning_rate": 1.8803969707761866e-05, + "loss": 1.0217, + "step": 1089 + }, + { + "epoch": 0.18, + "learning_rate": 1.8801393469006558e-05, + "loss": 0.9843, + "step": 1090 + }, + { + "epoch": 0.18, + "learning_rate": 1.879881463550846e-05, + "loss": 0.9334, + "step": 1091 + }, + { + "epoch": 0.18, + "learning_rate": 1.879623320802785e-05, + "loss": 1.0211, + "step": 1092 + }, + { + "epoch": 0.18, + "learning_rate": 1.8793649187325754e-05, + "loss": 0.9966, + "step": 1093 + }, + { + "epoch": 0.18, + "learning_rate": 1.879106257416397e-05, + "loss": 0.9717, + "step": 1094 + }, + { + "epoch": 0.18, + "learning_rate": 1.8788473369305058e-05, + "loss": 0.996, + "step": 1095 + }, + { + "epoch": 0.18, + "learning_rate": 1.8785881573512345e-05, + "loss": 1.0669, + "step": 1096 + }, + { + "epoch": 0.18, + "learning_rate": 1.8783287187549915e-05, + "loss": 0.9869, + "step": 1097 + }, + { + "epoch": 0.18, + "learning_rate": 1.878069021218262e-05, + "loss": 0.9617, + "step": 1098 + }, + { + "epoch": 0.18, + "learning_rate": 1.877809064817608e-05, + "loss": 0.9576, + "step": 1099 + }, + { + "epoch": 0.18, + "learning_rate": 1.8775488496296672e-05, + "loss": 0.977, + "step": 1100 + }, + { + "epoch": 0.18, + "learning_rate": 1.877288375731154e-05, + "loss": 0.9904, + "step": 1101 + }, + { + "epoch": 0.18, + "learning_rate": 1.8770276431988582e-05, + "loss": 1.049, + "step": 1102 + }, + { + "epoch": 0.18, + "learning_rate": 1.876766652109647e-05, + "loss": 0.963, + "step": 1103 + }, + { + "epoch": 0.19, + "learning_rate": 1.8765054025404627e-05, + "loss": 0.997, + "step": 1104 + }, + { + "epoch": 0.19, + "learning_rate": 1.8762438945683252e-05, + "loss": 1.0017, + "step": 1105 + }, + { + "epoch": 0.19, + "learning_rate": 1.875982128270329e-05, + "loss": 0.9832, + "step": 1106 + }, + { + "epoch": 0.19, + "learning_rate": 1.875720103723646e-05, + "loss": 0.9542, + "step": 1107 + }, + { + "epoch": 0.19, + "learning_rate": 1.875457821005524e-05, + "loss": 0.9662, + "step": 1108 + }, + { + "epoch": 0.19, + "learning_rate": 1.8751952801932866e-05, + "loss": 0.9121, + "step": 1109 + }, + { + "epoch": 0.19, + "learning_rate": 1.8749324813643328e-05, + "loss": 0.9428, + "step": 1110 + }, + { + "epoch": 0.19, + "learning_rate": 1.8746694245961395e-05, + "loss": 1.0123, + "step": 1111 + }, + { + "epoch": 0.19, + "learning_rate": 1.874406109966258e-05, + "loss": 1.0193, + "step": 1112 + }, + { + "epoch": 0.19, + "learning_rate": 1.874142537552316e-05, + "loss": 0.9992, + "step": 1113 + }, + { + "epoch": 0.19, + "learning_rate": 1.873878707432018e-05, + "loss": 1.0042, + "step": 1114 + }, + { + "epoch": 0.19, + "learning_rate": 1.8736146196831433e-05, + "loss": 1.0293, + "step": 1115 + }, + { + "epoch": 0.19, + "learning_rate": 1.8733502743835483e-05, + "loss": 0.9998, + "step": 1116 + }, + { + "epoch": 0.19, + "learning_rate": 1.8730856716111642e-05, + "loss": 1.0046, + "step": 1117 + }, + { + "epoch": 0.19, + "learning_rate": 1.8728208114439992e-05, + "loss": 0.3424, + "step": 1118 + }, + { + "epoch": 0.19, + "learning_rate": 1.8725556939601365e-05, + "loss": 0.9906, + 
"step": 1119 + }, + { + "epoch": 0.19, + "learning_rate": 1.8722903192377355e-05, + "loss": 0.9319, + "step": 1120 + }, + { + "epoch": 0.19, + "learning_rate": 1.872024687355032e-05, + "loss": 0.9328, + "step": 1121 + }, + { + "epoch": 0.19, + "learning_rate": 1.871758798390336e-05, + "loss": 0.9335, + "step": 1122 + }, + { + "epoch": 0.19, + "learning_rate": 1.871492652422035e-05, + "loss": 0.9186, + "step": 1123 + }, + { + "epoch": 0.19, + "learning_rate": 1.8712262495285917e-05, + "loss": 1.0427, + "step": 1124 + }, + { + "epoch": 0.19, + "learning_rate": 1.870959589788544e-05, + "loss": 0.9567, + "step": 1125 + }, + { + "epoch": 0.19, + "learning_rate": 1.8706926732805065e-05, + "loss": 1.0185, + "step": 1126 + }, + { + "epoch": 0.19, + "learning_rate": 1.8704255000831688e-05, + "loss": 0.9529, + "step": 1127 + }, + { + "epoch": 0.19, + "learning_rate": 1.8701580702752963e-05, + "loss": 0.9834, + "step": 1128 + }, + { + "epoch": 0.19, + "learning_rate": 1.8698903839357304e-05, + "loss": 0.9333, + "step": 1129 + }, + { + "epoch": 0.19, + "learning_rate": 1.869622441143387e-05, + "loss": 0.9091, + "step": 1130 + }, + { + "epoch": 0.19, + "learning_rate": 1.8693542419772593e-05, + "loss": 1.0085, + "step": 1131 + }, + { + "epoch": 0.19, + "learning_rate": 1.8690857865164145e-05, + "loss": 1.0083, + "step": 1132 + }, + { + "epoch": 0.19, + "learning_rate": 1.868817074839997e-05, + "loss": 1.0375, + "step": 1133 + }, + { + "epoch": 0.19, + "learning_rate": 1.868548107027225e-05, + "loss": 0.9291, + "step": 1134 + }, + { + "epoch": 0.19, + "learning_rate": 1.8682788831573932e-05, + "loss": 1.0179, + "step": 1135 + }, + { + "epoch": 0.19, + "learning_rate": 1.8680094033098718e-05, + "loss": 1.039, + "step": 1136 + }, + { + "epoch": 0.19, + "learning_rate": 1.8677396675641062e-05, + "loss": 0.9735, + "step": 1137 + }, + { + "epoch": 0.19, + "learning_rate": 1.8674696759996173e-05, + "loss": 0.8872, + "step": 1138 + }, + { + "epoch": 0.19, + "learning_rate": 1.8671994286960014e-05, + "loss": 1.0014, + "step": 1139 + }, + { + "epoch": 0.19, + "learning_rate": 1.8669289257329305e-05, + "loss": 0.9993, + "step": 1140 + }, + { + "epoch": 0.19, + "learning_rate": 1.8666581671901513e-05, + "loss": 0.9696, + "step": 1141 + }, + { + "epoch": 0.19, + "learning_rate": 1.866387153147486e-05, + "loss": 0.9943, + "step": 1142 + }, + { + "epoch": 0.19, + "learning_rate": 1.8661158836848333e-05, + "loss": 0.99, + "step": 1143 + }, + { + "epoch": 0.19, + "learning_rate": 1.8658443588821657e-05, + "loss": 0.9628, + "step": 1144 + }, + { + "epoch": 0.19, + "learning_rate": 1.8655725788195315e-05, + "loss": 0.9624, + "step": 1145 + }, + { + "epoch": 0.19, + "learning_rate": 1.8653005435770546e-05, + "loss": 0.9835, + "step": 1146 + }, + { + "epoch": 0.19, + "learning_rate": 1.8650282532349332e-05, + "loss": 1.0539, + "step": 1147 + }, + { + "epoch": 0.19, + "learning_rate": 1.8647557078734423e-05, + "loss": 0.9636, + "step": 1148 + }, + { + "epoch": 0.19, + "learning_rate": 1.8644829075729303e-05, + "loss": 0.9944, + "step": 1149 + }, + { + "epoch": 0.19, + "learning_rate": 1.864209852413822e-05, + "loss": 1.0179, + "step": 1150 + }, + { + "epoch": 0.19, + "learning_rate": 1.863936542476617e-05, + "loss": 0.9946, + "step": 1151 + }, + { + "epoch": 0.19, + "learning_rate": 1.8636629778418894e-05, + "loss": 0.9957, + "step": 1152 + }, + { + "epoch": 0.19, + "learning_rate": 1.863389158590289e-05, + "loss": 0.9517, + "step": 1153 + }, + { + "epoch": 0.19, + "learning_rate": 1.8631150848025414e-05, + "loss": 0.9281, 
+ "step": 1154 + }, + { + "epoch": 0.19, + "learning_rate": 1.862840756559446e-05, + "loss": 0.9617, + "step": 1155 + }, + { + "epoch": 0.19, + "learning_rate": 1.8625661739418767e-05, + "loss": 1.048, + "step": 1156 + }, + { + "epoch": 0.19, + "learning_rate": 1.8622913370307846e-05, + "loss": 1.0628, + "step": 1157 + }, + { + "epoch": 0.19, + "learning_rate": 1.8620162459071936e-05, + "loss": 0.9671, + "step": 1158 + }, + { + "epoch": 0.19, + "learning_rate": 1.8617409006522042e-05, + "loss": 0.9365, + "step": 1159 + }, + { + "epoch": 0.19, + "learning_rate": 1.8614653013469905e-05, + "loss": 0.9716, + "step": 1160 + }, + { + "epoch": 0.19, + "learning_rate": 1.861189448072802e-05, + "loss": 0.9755, + "step": 1161 + }, + { + "epoch": 0.19, + "learning_rate": 1.8609133409109637e-05, + "loss": 1.0009, + "step": 1162 + }, + { + "epoch": 0.19, + "learning_rate": 1.8606369799428744e-05, + "loss": 0.9219, + "step": 1163 + }, + { + "epoch": 0.2, + "learning_rate": 1.8603603652500085e-05, + "loss": 0.9437, + "step": 1164 + }, + { + "epoch": 0.2, + "learning_rate": 1.860083496913915e-05, + "loss": 1.0225, + "step": 1165 + }, + { + "epoch": 0.2, + "learning_rate": 1.8598063750162166e-05, + "loss": 0.9976, + "step": 1166 + }, + { + "epoch": 0.2, + "learning_rate": 1.859528999638613e-05, + "loss": 0.8956, + "step": 1167 + }, + { + "epoch": 0.2, + "learning_rate": 1.8592513708628767e-05, + "loss": 0.9436, + "step": 1168 + }, + { + "epoch": 0.2, + "learning_rate": 1.8589734887708556e-05, + "loss": 0.3826, + "step": 1169 + }, + { + "epoch": 0.2, + "learning_rate": 1.8586953534444726e-05, + "loss": 0.9986, + "step": 1170 + }, + { + "epoch": 0.2, + "learning_rate": 1.8584169649657244e-05, + "loss": 0.8879, + "step": 1171 + }, + { + "epoch": 0.2, + "learning_rate": 1.8581383234166834e-05, + "loss": 0.9544, + "step": 1172 + }, + { + "epoch": 0.2, + "learning_rate": 1.857859428879495e-05, + "loss": 0.9635, + "step": 1173 + }, + { + "epoch": 0.2, + "learning_rate": 1.8575802814363816e-05, + "loss": 0.9867, + "step": 1174 + }, + { + "epoch": 0.2, + "learning_rate": 1.8573008811696378e-05, + "loss": 0.9904, + "step": 1175 + }, + { + "epoch": 0.2, + "learning_rate": 1.857021228161634e-05, + "loss": 0.979, + "step": 1176 + }, + { + "epoch": 0.2, + "learning_rate": 1.856741322494815e-05, + "loss": 0.9755, + "step": 1177 + }, + { + "epoch": 0.2, + "learning_rate": 1.8564611642517e-05, + "loss": 0.9955, + "step": 1178 + }, + { + "epoch": 0.2, + "learning_rate": 1.8561807535148818e-05, + "loss": 1.034, + "step": 1179 + }, + { + "epoch": 0.2, + "learning_rate": 1.8559000903670293e-05, + "loss": 1.0103, + "step": 1180 + }, + { + "epoch": 0.2, + "learning_rate": 1.8556191748908845e-05, + "loss": 0.9783, + "step": 1181 + }, + { + "epoch": 0.2, + "learning_rate": 1.8553380071692645e-05, + "loss": 1.01, + "step": 1182 + }, + { + "epoch": 0.2, + "learning_rate": 1.8550565872850602e-05, + "loss": 1.0146, + "step": 1183 + }, + { + "epoch": 0.2, + "learning_rate": 1.8547749153212373e-05, + "loss": 0.974, + "step": 1184 + }, + { + "epoch": 0.2, + "learning_rate": 1.8544929913608353e-05, + "loss": 0.9829, + "step": 1185 + }, + { + "epoch": 0.2, + "learning_rate": 1.8542108154869686e-05, + "loss": 0.9724, + "step": 1186 + }, + { + "epoch": 0.2, + "learning_rate": 1.8539283877828257e-05, + "loss": 0.9399, + "step": 1187 + }, + { + "epoch": 0.2, + "learning_rate": 1.8536457083316692e-05, + "loss": 0.9522, + "step": 1188 + }, + { + "epoch": 0.2, + "learning_rate": 1.8533627772168362e-05, + "loss": 0.9535, + "step": 1189 + }, + { + 
"epoch": 0.2, + "learning_rate": 1.853079594521737e-05, + "loss": 0.9533, + "step": 1190 + }, + { + "epoch": 0.2, + "learning_rate": 1.8527961603298572e-05, + "loss": 0.9894, + "step": 1191 + }, + { + "epoch": 0.2, + "learning_rate": 1.8525124747247566e-05, + "loss": 0.3953, + "step": 1192 + }, + { + "epoch": 0.2, + "learning_rate": 1.852228537790068e-05, + "loss": 0.9874, + "step": 1193 + }, + { + "epoch": 0.2, + "learning_rate": 1.851944349609499e-05, + "loss": 1.0302, + "step": 1194 + }, + { + "epoch": 0.2, + "learning_rate": 1.8516599102668324e-05, + "loss": 0.9583, + "step": 1195 + }, + { + "epoch": 0.2, + "learning_rate": 1.8513752198459224e-05, + "loss": 0.9583, + "step": 1196 + }, + { + "epoch": 0.2, + "learning_rate": 1.851090278430699e-05, + "loss": 0.9739, + "step": 1197 + }, + { + "epoch": 0.2, + "learning_rate": 1.850805086105167e-05, + "loss": 1.0088, + "step": 1198 + }, + { + "epoch": 0.2, + "learning_rate": 1.850519642953403e-05, + "loss": 1.0248, + "step": 1199 + }, + { + "epoch": 0.2, + "learning_rate": 1.850233949059559e-05, + "loss": 0.9533, + "step": 1200 + }, + { + "epoch": 0.2, + "learning_rate": 1.8499480045078602e-05, + "loss": 0.9942, + "step": 1201 + }, + { + "epoch": 0.2, + "learning_rate": 1.8496618093826064e-05, + "loss": 0.3602, + "step": 1202 + }, + { + "epoch": 0.2, + "learning_rate": 1.849375363768171e-05, + "loss": 1.0329, + "step": 1203 + }, + { + "epoch": 0.2, + "learning_rate": 1.8490886677490007e-05, + "loss": 0.9469, + "step": 1204 + }, + { + "epoch": 0.2, + "learning_rate": 1.8488017214096173e-05, + "loss": 1.0235, + "step": 1205 + }, + { + "epoch": 0.2, + "learning_rate": 1.8485145248346147e-05, + "loss": 0.9493, + "step": 1206 + }, + { + "epoch": 0.2, + "learning_rate": 1.848227078108662e-05, + "loss": 0.958, + "step": 1207 + }, + { + "epoch": 0.2, + "learning_rate": 1.847939381316501e-05, + "loss": 0.9575, + "step": 1208 + }, + { + "epoch": 0.2, + "learning_rate": 1.8476514345429485e-05, + "loss": 1.0123, + "step": 1209 + }, + { + "epoch": 0.2, + "learning_rate": 1.8473632378728932e-05, + "loss": 1.0039, + "step": 1210 + }, + { + "epoch": 0.2, + "learning_rate": 1.8470747913912993e-05, + "loss": 1.018, + "step": 1211 + }, + { + "epoch": 0.2, + "learning_rate": 1.8467860951832035e-05, + "loss": 0.9605, + "step": 1212 + }, + { + "epoch": 0.2, + "learning_rate": 1.8464971493337167e-05, + "loss": 0.9897, + "step": 1213 + }, + { + "epoch": 0.2, + "learning_rate": 1.8462079539280233e-05, + "loss": 0.951, + "step": 1214 + }, + { + "epoch": 0.2, + "learning_rate": 1.8459185090513802e-05, + "loss": 0.9572, + "step": 1215 + }, + { + "epoch": 0.2, + "learning_rate": 1.8456288147891196e-05, + "loss": 0.9909, + "step": 1216 + }, + { + "epoch": 0.2, + "learning_rate": 1.8453388712266464e-05, + "loss": 0.9605, + "step": 1217 + }, + { + "epoch": 0.2, + "learning_rate": 1.8450486784494384e-05, + "loss": 0.978, + "step": 1218 + }, + { + "epoch": 0.2, + "learning_rate": 1.844758236543048e-05, + "loss": 0.9735, + "step": 1219 + }, + { + "epoch": 0.2, + "learning_rate": 1.8444675455931006e-05, + "loss": 0.9986, + "step": 1220 + }, + { + "epoch": 0.2, + "learning_rate": 1.8441766056852947e-05, + "loss": 0.9732, + "step": 1221 + }, + { + "epoch": 0.2, + "learning_rate": 1.8438854169054022e-05, + "loss": 0.9316, + "step": 1222 + }, + { + "epoch": 0.21, + "learning_rate": 1.8435939793392686e-05, + "loss": 0.96, + "step": 1223 + }, + { + "epoch": 0.21, + "learning_rate": 1.8433022930728132e-05, + "loss": 0.9164, + "step": 1224 + }, + { + "epoch": 0.21, + "learning_rate": 
1.8430103581920278e-05, + "loss": 0.9345, + "step": 1225 + }, + { + "epoch": 0.21, + "learning_rate": 1.842718174782978e-05, + "loss": 0.9485, + "step": 1226 + }, + { + "epoch": 0.21, + "learning_rate": 1.8424257429318027e-05, + "loss": 0.9401, + "step": 1227 + }, + { + "epoch": 0.21, + "learning_rate": 1.8421330627247137e-05, + "loss": 1.0052, + "step": 1228 + }, + { + "epoch": 0.21, + "learning_rate": 1.841840134247996e-05, + "loss": 0.9294, + "step": 1229 + }, + { + "epoch": 0.21, + "learning_rate": 1.8415469575880078e-05, + "loss": 0.9974, + "step": 1230 + }, + { + "epoch": 0.21, + "learning_rate": 1.8412535328311813e-05, + "loss": 0.9776, + "step": 1231 + }, + { + "epoch": 0.21, + "learning_rate": 1.840959860064021e-05, + "loss": 1.0029, + "step": 1232 + }, + { + "epoch": 0.21, + "learning_rate": 1.840665939373104e-05, + "loss": 0.996, + "step": 1233 + }, + { + "epoch": 0.21, + "learning_rate": 1.8403717708450823e-05, + "loss": 0.9609, + "step": 1234 + }, + { + "epoch": 0.21, + "learning_rate": 1.8400773545666788e-05, + "loss": 0.9621, + "step": 1235 + }, + { + "epoch": 0.21, + "learning_rate": 1.8397826906246913e-05, + "loss": 0.982, + "step": 1236 + }, + { + "epoch": 0.21, + "learning_rate": 1.8394877791059895e-05, + "loss": 0.9833, + "step": 1237 + }, + { + "epoch": 0.21, + "learning_rate": 1.8391926200975162e-05, + "loss": 1.0106, + "step": 1238 + }, + { + "epoch": 0.21, + "learning_rate": 1.838897213686288e-05, + "loss": 0.93, + "step": 1239 + }, + { + "epoch": 0.21, + "learning_rate": 1.838601559959393e-05, + "loss": 0.9391, + "step": 1240 + }, + { + "epoch": 0.21, + "learning_rate": 1.8383056590039932e-05, + "loss": 0.9743, + "step": 1241 + }, + { + "epoch": 0.21, + "learning_rate": 1.8380095109073236e-05, + "loss": 1.0233, + "step": 1242 + }, + { + "epoch": 0.21, + "learning_rate": 1.8377131157566917e-05, + "loss": 0.996, + "step": 1243 + }, + { + "epoch": 0.21, + "learning_rate": 1.8374164736394777e-05, + "loss": 0.8958, + "step": 1244 + }, + { + "epoch": 0.21, + "learning_rate": 1.8371195846431355e-05, + "loss": 0.9439, + "step": 1245 + }, + { + "epoch": 0.21, + "learning_rate": 1.8368224488551898e-05, + "loss": 0.9664, + "step": 1246 + }, + { + "epoch": 0.21, + "learning_rate": 1.83652506636324e-05, + "loss": 0.3604, + "step": 1247 + }, + { + "epoch": 0.21, + "learning_rate": 1.8362274372549577e-05, + "loss": 0.9639, + "step": 1248 + }, + { + "epoch": 0.21, + "learning_rate": 1.835929561618087e-05, + "loss": 0.9559, + "step": 1249 + }, + { + "epoch": 0.21, + "learning_rate": 1.835631439540445e-05, + "loss": 0.9131, + "step": 1250 + }, + { + "epoch": 0.21, + "learning_rate": 1.83533307110992e-05, + "loss": 0.9813, + "step": 1251 + }, + { + "epoch": 0.21, + "learning_rate": 1.835034456414476e-05, + "loss": 0.969, + "step": 1252 + }, + { + "epoch": 0.21, + "learning_rate": 1.8347355955421462e-05, + "loss": 0.8885, + "step": 1253 + }, + { + "epoch": 0.21, + "learning_rate": 1.8344364885810388e-05, + "loss": 0.9625, + "step": 1254 + }, + { + "epoch": 0.21, + "learning_rate": 1.8341371356193334e-05, + "loss": 0.9515, + "step": 1255 + }, + { + "epoch": 0.21, + "learning_rate": 1.8338375367452822e-05, + "loss": 0.9895, + "step": 1256 + }, + { + "epoch": 0.21, + "learning_rate": 1.8335376920472098e-05, + "loss": 1.0174, + "step": 1257 + }, + { + "epoch": 0.21, + "learning_rate": 1.8332376016135146e-05, + "loss": 0.957, + "step": 1258 + }, + { + "epoch": 0.21, + "learning_rate": 1.832937265532665e-05, + "loss": 1.0354, + "step": 1259 + }, + { + "epoch": 0.21, + "learning_rate": 
1.8326366838932048e-05, + "loss": 1.0098, + "step": 1260 + }, + { + "epoch": 0.21, + "learning_rate": 1.8323358567837474e-05, + "loss": 0.9979, + "step": 1261 + }, + { + "epoch": 0.21, + "learning_rate": 1.83203478429298e-05, + "loss": 0.91, + "step": 1262 + }, + { + "epoch": 0.21, + "learning_rate": 1.831733466509662e-05, + "loss": 0.988, + "step": 1263 + }, + { + "epoch": 0.21, + "learning_rate": 1.8314319035226254e-05, + "loss": 0.9491, + "step": 1264 + }, + { + "epoch": 0.21, + "learning_rate": 1.8311300954207737e-05, + "loss": 0.9889, + "step": 1265 + }, + { + "epoch": 0.21, + "learning_rate": 1.8308280422930832e-05, + "loss": 0.9686, + "step": 1266 + }, + { + "epoch": 0.21, + "learning_rate": 1.830525744228602e-05, + "loss": 1.0163, + "step": 1267 + }, + { + "epoch": 0.21, + "learning_rate": 1.8302232013164518e-05, + "loss": 1.0825, + "step": 1268 + }, + { + "epoch": 0.21, + "learning_rate": 1.829920413645824e-05, + "loss": 0.9335, + "step": 1269 + }, + { + "epoch": 0.21, + "learning_rate": 1.8296173813059844e-05, + "loss": 0.8598, + "step": 1270 + }, + { + "epoch": 0.21, + "learning_rate": 1.82931410438627e-05, + "loss": 0.9452, + "step": 1271 + }, + { + "epoch": 0.21, + "learning_rate": 1.8290105829760902e-05, + "loss": 0.9687, + "step": 1272 + }, + { + "epoch": 0.21, + "learning_rate": 1.8287068171649257e-05, + "loss": 1.0124, + "step": 1273 + }, + { + "epoch": 0.21, + "learning_rate": 1.82840280704233e-05, + "loss": 0.9469, + "step": 1274 + }, + { + "epoch": 0.21, + "learning_rate": 1.828098552697929e-05, + "loss": 0.9716, + "step": 1275 + }, + { + "epoch": 0.21, + "learning_rate": 1.8277940542214195e-05, + "loss": 0.9915, + "step": 1276 + }, + { + "epoch": 0.21, + "learning_rate": 1.827489311702571e-05, + "loss": 0.9451, + "step": 1277 + }, + { + "epoch": 0.21, + "learning_rate": 1.8271843252312248e-05, + "loss": 0.9634, + "step": 1278 + }, + { + "epoch": 0.21, + "learning_rate": 1.8268790948972942e-05, + "loss": 0.3493, + "step": 1279 + }, + { + "epoch": 0.21, + "learning_rate": 1.8265736207907637e-05, + "loss": 1.0231, + "step": 1280 + }, + { + "epoch": 0.21, + "learning_rate": 1.8262679030016913e-05, + "loss": 0.9534, + "step": 1281 + }, + { + "epoch": 0.21, + "learning_rate": 1.825961941620205e-05, + "loss": 0.9779, + "step": 1282 + }, + { + "epoch": 0.22, + "learning_rate": 1.8256557367365057e-05, + "loss": 0.9432, + "step": 1283 + }, + { + "epoch": 0.22, + "learning_rate": 1.8253492884408658e-05, + "loss": 0.8975, + "step": 1284 + }, + { + "epoch": 0.22, + "learning_rate": 1.825042596823629e-05, + "loss": 0.9529, + "step": 1285 + }, + { + "epoch": 0.22, + "learning_rate": 1.824735661975212e-05, + "loss": 0.9617, + "step": 1286 + }, + { + "epoch": 0.22, + "learning_rate": 1.824428483986102e-05, + "loss": 0.9634, + "step": 1287 + }, + { + "epoch": 0.22, + "learning_rate": 1.8241210629468586e-05, + "loss": 0.9904, + "step": 1288 + }, + { + "epoch": 0.22, + "learning_rate": 1.823813398948112e-05, + "loss": 1.0188, + "step": 1289 + }, + { + "epoch": 0.22, + "learning_rate": 1.8235054920805653e-05, + "loss": 0.9324, + "step": 1290 + }, + { + "epoch": 0.22, + "learning_rate": 1.8231973424349924e-05, + "loss": 0.9396, + "step": 1291 + }, + { + "epoch": 0.22, + "learning_rate": 1.8228889501022395e-05, + "loss": 0.9442, + "step": 1292 + }, + { + "epoch": 0.22, + "learning_rate": 1.8225803151732236e-05, + "loss": 0.9794, + "step": 1293 + }, + { + "epoch": 0.22, + "learning_rate": 1.822271437738933e-05, + "loss": 1.0204, + "step": 1294 + }, + { + "epoch": 0.22, + "learning_rate": 
1.821962317890429e-05, + "loss": 0.9436, + "step": 1295 + }, + { + "epoch": 0.22, + "learning_rate": 1.8216529557188424e-05, + "loss": 0.9828, + "step": 1296 + }, + { + "epoch": 0.22, + "learning_rate": 1.821343351315377e-05, + "loss": 0.9462, + "step": 1297 + }, + { + "epoch": 0.22, + "learning_rate": 1.8210335047713074e-05, + "loss": 0.8913, + "step": 1298 + }, + { + "epoch": 0.22, + "learning_rate": 1.8207234161779793e-05, + "loss": 0.9707, + "step": 1299 + }, + { + "epoch": 0.22, + "learning_rate": 1.82041308562681e-05, + "loss": 0.9658, + "step": 1300 + }, + { + "epoch": 0.22, + "learning_rate": 1.820102513209289e-05, + "loss": 0.9785, + "step": 1301 + }, + { + "epoch": 0.22, + "learning_rate": 1.819791699016975e-05, + "loss": 1.0213, + "step": 1302 + }, + { + "epoch": 0.22, + "learning_rate": 1.8194806431415005e-05, + "loss": 1.0035, + "step": 1303 + }, + { + "epoch": 0.22, + "learning_rate": 1.8191693456745673e-05, + "loss": 0.938, + "step": 1304 + }, + { + "epoch": 0.22, + "learning_rate": 1.818857806707949e-05, + "loss": 1.029, + "step": 1305 + }, + { + "epoch": 0.22, + "learning_rate": 1.8185460263334914e-05, + "loss": 0.922, + "step": 1306 + }, + { + "epoch": 0.22, + "learning_rate": 1.81823400464311e-05, + "loss": 0.9415, + "step": 1307 + }, + { + "epoch": 0.22, + "learning_rate": 1.817921741728792e-05, + "loss": 0.9821, + "step": 1308 + }, + { + "epoch": 0.22, + "learning_rate": 1.817609237682596e-05, + "loss": 0.3507, + "step": 1309 + }, + { + "epoch": 0.22, + "learning_rate": 1.817296492596651e-05, + "loss": 0.9967, + "step": 1310 + }, + { + "epoch": 0.22, + "learning_rate": 1.816983506563158e-05, + "loss": 0.9574, + "step": 1311 + }, + { + "epoch": 0.22, + "learning_rate": 1.816670279674389e-05, + "loss": 0.9727, + "step": 1312 + }, + { + "epoch": 0.22, + "learning_rate": 1.8163568120226856e-05, + "loss": 0.9329, + "step": 1313 + }, + { + "epoch": 0.22, + "learning_rate": 1.8160431037004618e-05, + "loss": 0.9658, + "step": 1314 + }, + { + "epoch": 0.22, + "learning_rate": 1.815729154800202e-05, + "loss": 0.3549, + "step": 1315 + }, + { + "epoch": 0.22, + "learning_rate": 1.815414965414462e-05, + "loss": 0.9622, + "step": 1316 + }, + { + "epoch": 0.22, + "learning_rate": 1.8151005356358673e-05, + "loss": 1.0136, + "step": 1317 + }, + { + "epoch": 0.22, + "learning_rate": 1.8147858655571156e-05, + "loss": 0.9385, + "step": 1318 + }, + { + "epoch": 0.22, + "learning_rate": 1.814470955270975e-05, + "loss": 0.918, + "step": 1319 + }, + { + "epoch": 0.22, + "learning_rate": 1.8141558048702844e-05, + "loss": 0.9565, + "step": 1320 + }, + { + "epoch": 0.22, + "learning_rate": 1.813840414447953e-05, + "loss": 0.9748, + "step": 1321 + }, + { + "epoch": 0.22, + "learning_rate": 1.8135247840969615e-05, + "loss": 1.0054, + "step": 1322 + }, + { + "epoch": 0.22, + "learning_rate": 1.8132089139103612e-05, + "loss": 0.961, + "step": 1323 + }, + { + "epoch": 0.22, + "learning_rate": 1.8128928039812736e-05, + "loss": 0.9038, + "step": 1324 + }, + { + "epoch": 0.22, + "learning_rate": 1.8125764544028913e-05, + "loss": 0.9568, + "step": 1325 + }, + { + "epoch": 0.22, + "learning_rate": 1.8122598652684774e-05, + "loss": 0.9795, + "step": 1326 + }, + { + "epoch": 0.22, + "learning_rate": 1.8119430366713658e-05, + "loss": 1.002, + "step": 1327 + }, + { + "epoch": 0.22, + "learning_rate": 1.8116259687049612e-05, + "loss": 0.9477, + "step": 1328 + }, + { + "epoch": 0.22, + "learning_rate": 1.8113086614627377e-05, + "loss": 0.9465, + "step": 1329 + }, + { + "epoch": 0.22, + "learning_rate": 
1.8109911150382417e-05, + "loss": 0.9045, + "step": 1330 + }, + { + "epoch": 0.22, + "learning_rate": 1.8106733295250885e-05, + "loss": 0.9708, + "step": 1331 + }, + { + "epoch": 0.22, + "learning_rate": 1.8103553050169652e-05, + "loss": 0.9414, + "step": 1332 + }, + { + "epoch": 0.22, + "learning_rate": 1.810037041607628e-05, + "loss": 0.9644, + "step": 1333 + }, + { + "epoch": 0.22, + "learning_rate": 1.809718539390905e-05, + "loss": 0.9477, + "step": 1334 + }, + { + "epoch": 0.22, + "learning_rate": 1.8093997984606936e-05, + "loss": 0.97, + "step": 1335 + }, + { + "epoch": 0.22, + "learning_rate": 1.809080818910962e-05, + "loss": 1.0016, + "step": 1336 + }, + { + "epoch": 0.22, + "learning_rate": 1.8087616008357488e-05, + "loss": 0.9578, + "step": 1337 + }, + { + "epoch": 0.22, + "learning_rate": 1.8084421443291633e-05, + "loss": 0.9412, + "step": 1338 + }, + { + "epoch": 0.22, + "learning_rate": 1.8081224494853834e-05, + "loss": 0.9971, + "step": 1339 + }, + { + "epoch": 0.22, + "learning_rate": 1.8078025163986595e-05, + "loss": 0.9379, + "step": 1340 + }, + { + "epoch": 0.22, + "learning_rate": 1.807482345163311e-05, + "loss": 0.894, + "step": 1341 + }, + { + "epoch": 0.22, + "learning_rate": 1.8071619358737274e-05, + "loss": 0.948, + "step": 1342 + }, + { + "epoch": 0.23, + "learning_rate": 1.8068412886243692e-05, + "loss": 1.0351, + "step": 1343 + }, + { + "epoch": 0.23, + "learning_rate": 1.806520403509766e-05, + "loss": 0.9722, + "step": 1344 + }, + { + "epoch": 0.23, + "learning_rate": 1.8061992806245186e-05, + "loss": 0.9087, + "step": 1345 + }, + { + "epoch": 0.23, + "learning_rate": 1.805877920063297e-05, + "loss": 0.9323, + "step": 1346 + }, + { + "epoch": 0.23, + "learning_rate": 1.805556321920842e-05, + "loss": 0.9845, + "step": 1347 + }, + { + "epoch": 0.23, + "learning_rate": 1.8052344862919637e-05, + "loss": 0.9891, + "step": 1348 + }, + { + "epoch": 0.23, + "learning_rate": 1.804912413271543e-05, + "loss": 0.9483, + "step": 1349 + }, + { + "epoch": 0.23, + "learning_rate": 1.8045901029545304e-05, + "loss": 0.9656, + "step": 1350 + }, + { + "epoch": 0.23, + "learning_rate": 1.8042675554359456e-05, + "loss": 0.9399, + "step": 1351 + }, + { + "epoch": 0.23, + "learning_rate": 1.80394477081088e-05, + "loss": 0.9908, + "step": 1352 + }, + { + "epoch": 0.23, + "learning_rate": 1.8036217491744935e-05, + "loss": 0.36, + "step": 1353 + }, + { + "epoch": 0.23, + "learning_rate": 1.803298490622016e-05, + "loss": 0.9831, + "step": 1354 + }, + { + "epoch": 0.23, + "learning_rate": 1.8029749952487474e-05, + "loss": 0.9578, + "step": 1355 + }, + { + "epoch": 0.23, + "learning_rate": 1.8026512631500583e-05, + "loss": 0.9787, + "step": 1356 + }, + { + "epoch": 0.23, + "learning_rate": 1.8023272944213875e-05, + "loss": 0.9668, + "step": 1357 + }, + { + "epoch": 0.23, + "learning_rate": 1.802003089158245e-05, + "loss": 0.8953, + "step": 1358 + }, + { + "epoch": 0.23, + "learning_rate": 1.8016786474562093e-05, + "loss": 0.9142, + "step": 1359 + }, + { + "epoch": 0.23, + "learning_rate": 1.8013539694109293e-05, + "loss": 0.9472, + "step": 1360 + }, + { + "epoch": 0.23, + "learning_rate": 1.8010290551181237e-05, + "loss": 0.9636, + "step": 1361 + }, + { + "epoch": 0.23, + "learning_rate": 1.8007039046735807e-05, + "loss": 0.948, + "step": 1362 + }, + { + "epoch": 0.23, + "learning_rate": 1.800378518173158e-05, + "loss": 0.9708, + "step": 1363 + }, + { + "epoch": 0.23, + "learning_rate": 1.8000528957127828e-05, + "loss": 0.9859, + "step": 1364 + }, + { + "epoch": 0.23, + "learning_rate": 
1.7997270373884523e-05, + "loss": 0.9618, + "step": 1365 + }, + { + "epoch": 0.23, + "learning_rate": 1.7994009432962326e-05, + "loss": 1.0097, + "step": 1366 + }, + { + "epoch": 0.23, + "learning_rate": 1.7990746135322593e-05, + "loss": 0.9195, + "step": 1367 + }, + { + "epoch": 0.23, + "learning_rate": 1.798748048192739e-05, + "loss": 1.0004, + "step": 1368 + }, + { + "epoch": 0.23, + "learning_rate": 1.798421247373945e-05, + "loss": 0.9393, + "step": 1369 + }, + { + "epoch": 0.23, + "learning_rate": 1.7980942111722234e-05, + "loss": 0.9771, + "step": 1370 + }, + { + "epoch": 0.23, + "learning_rate": 1.7977669396839866e-05, + "loss": 0.9909, + "step": 1371 + }, + { + "epoch": 0.23, + "learning_rate": 1.797439433005718e-05, + "loss": 1.0087, + "step": 1372 + }, + { + "epoch": 0.23, + "learning_rate": 1.7971116912339698e-05, + "loss": 0.9926, + "step": 1373 + }, + { + "epoch": 0.23, + "learning_rate": 1.7967837144653643e-05, + "loss": 0.3581, + "step": 1374 + }, + { + "epoch": 0.23, + "learning_rate": 1.796455502796592e-05, + "loss": 1.0291, + "step": 1375 + }, + { + "epoch": 0.23, + "learning_rate": 1.796127056324413e-05, + "loss": 0.9419, + "step": 1376 + }, + { + "epoch": 0.23, + "learning_rate": 1.795798375145657e-05, + "loss": 0.9027, + "step": 1377 + }, + { + "epoch": 0.23, + "learning_rate": 1.7954694593572225e-05, + "loss": 0.9643, + "step": 1378 + }, + { + "epoch": 0.23, + "learning_rate": 1.795140309056078e-05, + "loss": 0.9529, + "step": 1379 + }, + { + "epoch": 0.23, + "learning_rate": 1.794810924339259e-05, + "loss": 0.3777, + "step": 1380 + }, + { + "epoch": 0.23, + "learning_rate": 1.7944813053038733e-05, + "loss": 0.9297, + "step": 1381 + }, + { + "epoch": 0.23, + "learning_rate": 1.7941514520470944e-05, + "loss": 0.9596, + "step": 1382 + }, + { + "epoch": 0.23, + "learning_rate": 1.793821364666168e-05, + "loss": 0.9656, + "step": 1383 + }, + { + "epoch": 0.23, + "learning_rate": 1.7934910432584057e-05, + "loss": 0.9716, + "step": 1384 + }, + { + "epoch": 0.23, + "learning_rate": 1.793160487921191e-05, + "loss": 1.007, + "step": 1385 + }, + { + "epoch": 0.23, + "learning_rate": 1.792829698751975e-05, + "loss": 0.9547, + "step": 1386 + }, + { + "epoch": 0.23, + "learning_rate": 1.7924986758482768e-05, + "loss": 0.9876, + "step": 1387 + }, + { + "epoch": 0.23, + "learning_rate": 1.7921674193076865e-05, + "loss": 0.9858, + "step": 1388 + }, + { + "epoch": 0.23, + "learning_rate": 1.7918359292278614e-05, + "loss": 0.9764, + "step": 1389 + }, + { + "epoch": 0.23, + "learning_rate": 1.7915042057065283e-05, + "loss": 0.9861, + "step": 1390 + }, + { + "epoch": 0.23, + "learning_rate": 1.791172248841483e-05, + "loss": 0.9239, + "step": 1391 + }, + { + "epoch": 0.23, + "learning_rate": 1.7908400587305896e-05, + "loss": 0.9085, + "step": 1392 + }, + { + "epoch": 0.23, + "learning_rate": 1.790507635471781e-05, + "loss": 0.9532, + "step": 1393 + }, + { + "epoch": 0.23, + "learning_rate": 1.79017497916306e-05, + "loss": 0.929, + "step": 1394 + }, + { + "epoch": 0.23, + "learning_rate": 1.789842089902496e-05, + "loss": 0.9725, + "step": 1395 + }, + { + "epoch": 0.23, + "learning_rate": 1.7895089677882288e-05, + "loss": 0.9653, + "step": 1396 + }, + { + "epoch": 0.23, + "learning_rate": 1.7891756129184664e-05, + "loss": 0.9707, + "step": 1397 + }, + { + "epoch": 0.23, + "learning_rate": 1.7888420253914854e-05, + "loss": 0.9656, + "step": 1398 + }, + { + "epoch": 0.23, + "learning_rate": 1.78850820530563e-05, + "loss": 0.9246, + "step": 1399 + }, + { + "epoch": 0.23, + "learning_rate": 
1.788174152759315e-05, + "loss": 0.9846, + "step": 1400 + }, + { + "epoch": 0.23, + "learning_rate": 1.787839867851022e-05, + "loss": 1.0135, + "step": 1401 + }, + { + "epoch": 0.24, + "learning_rate": 1.7875053506793015e-05, + "loss": 0.9185, + "step": 1402 + }, + { + "epoch": 0.24, + "learning_rate": 1.7871706013427732e-05, + "loss": 0.9743, + "step": 1403 + }, + { + "epoch": 0.24, + "learning_rate": 1.7868356199401242e-05, + "loss": 0.3534, + "step": 1404 + }, + { + "epoch": 0.24, + "learning_rate": 1.7865004065701107e-05, + "loss": 0.9927, + "step": 1405 + }, + { + "epoch": 0.24, + "learning_rate": 1.786164961331557e-05, + "loss": 0.98, + "step": 1406 + }, + { + "epoch": 0.24, + "learning_rate": 1.7858292843233564e-05, + "loss": 1.0226, + "step": 1407 + }, + { + "epoch": 0.24, + "learning_rate": 1.7854933756444692e-05, + "loss": 0.9979, + "step": 1408 + }, + { + "epoch": 0.24, + "learning_rate": 1.7851572353939253e-05, + "loss": 0.9408, + "step": 1409 + }, + { + "epoch": 0.24, + "learning_rate": 1.784820863670822e-05, + "loss": 1.0039, + "step": 1410 + }, + { + "epoch": 0.24, + "learning_rate": 1.7844842605743256e-05, + "loss": 0.9722, + "step": 1411 + }, + { + "epoch": 0.24, + "learning_rate": 1.78414742620367e-05, + "loss": 0.3633, + "step": 1412 + }, + { + "epoch": 0.24, + "learning_rate": 1.783810360658158e-05, + "loss": 0.9384, + "step": 1413 + }, + { + "epoch": 0.24, + "learning_rate": 1.783473064037159e-05, + "loss": 0.9721, + "step": 1414 + }, + { + "epoch": 0.24, + "learning_rate": 1.7831355364401123e-05, + "loss": 0.9535, + "step": 1415 + }, + { + "epoch": 0.24, + "learning_rate": 1.7827977779665245e-05, + "loss": 0.9954, + "step": 1416 + }, + { + "epoch": 0.24, + "learning_rate": 1.7824597887159704e-05, + "loss": 0.9534, + "step": 1417 + }, + { + "epoch": 0.24, + "learning_rate": 1.782121568788092e-05, + "loss": 1.0765, + "step": 1418 + }, + { + "epoch": 0.24, + "learning_rate": 1.781783118282601e-05, + "loss": 0.8943, + "step": 1419 + }, + { + "epoch": 0.24, + "learning_rate": 1.781444437299276e-05, + "loss": 0.9462, + "step": 1420 + }, + { + "epoch": 0.24, + "learning_rate": 1.7811055259379635e-05, + "loss": 1.0293, + "step": 1421 + }, + { + "epoch": 0.24, + "learning_rate": 1.7807663842985777e-05, + "loss": 0.3537, + "step": 1422 + }, + { + "epoch": 0.24, + "learning_rate": 1.780427012481102e-05, + "loss": 0.945, + "step": 1423 + }, + { + "epoch": 0.24, + "learning_rate": 1.7800874105855862e-05, + "loss": 0.9088, + "step": 1424 + }, + { + "epoch": 0.24, + "learning_rate": 1.7797475787121485e-05, + "loss": 1.0192, + "step": 1425 + }, + { + "epoch": 0.24, + "learning_rate": 1.7794075169609746e-05, + "loss": 0.9397, + "step": 1426 + }, + { + "epoch": 0.24, + "learning_rate": 1.779067225432319e-05, + "loss": 0.9259, + "step": 1427 + }, + { + "epoch": 0.24, + "learning_rate": 1.7787267042265028e-05, + "loss": 0.9184, + "step": 1428 + }, + { + "epoch": 0.24, + "learning_rate": 1.778385953443915e-05, + "loss": 0.9871, + "step": 1429 + }, + { + "epoch": 0.24, + "learning_rate": 1.778044973185013e-05, + "loss": 0.8936, + "step": 1430 + }, + { + "epoch": 0.24, + "learning_rate": 1.777703763550321e-05, + "loss": 1.0389, + "step": 1431 + }, + { + "epoch": 0.24, + "learning_rate": 1.7773623246404312e-05, + "loss": 0.9231, + "step": 1432 + }, + { + "epoch": 0.24, + "learning_rate": 1.7770206565560034e-05, + "loss": 0.3514, + "step": 1433 + }, + { + "epoch": 0.24, + "learning_rate": 1.7766787593977648e-05, + "loss": 0.9413, + "step": 1434 + }, + { + "epoch": 0.24, + "learning_rate": 
1.7763366332665106e-05, + "loss": 0.9253, + "step": 1435 + }, + { + "epoch": 0.24, + "learning_rate": 1.7759942782631027e-05, + "loss": 0.942, + "step": 1436 + }, + { + "epoch": 0.24, + "learning_rate": 1.7756516944884713e-05, + "loss": 1.0019, + "step": 1437 + }, + { + "epoch": 0.24, + "learning_rate": 1.775308882043613e-05, + "loss": 0.9388, + "step": 1438 + }, + { + "epoch": 0.24, + "learning_rate": 1.7749658410295935e-05, + "loss": 0.9604, + "step": 1439 + }, + { + "epoch": 0.24, + "learning_rate": 1.7746225715475444e-05, + "loss": 0.9799, + "step": 1440 + }, + { + "epoch": 0.24, + "learning_rate": 1.7742790736986652e-05, + "loss": 0.9406, + "step": 1441 + }, + { + "epoch": 0.24, + "learning_rate": 1.7739353475842225e-05, + "loss": 0.9533, + "step": 1442 + }, + { + "epoch": 0.24, + "learning_rate": 1.7735913933055506e-05, + "loss": 0.9465, + "step": 1443 + }, + { + "epoch": 0.24, + "learning_rate": 1.7732472109640504e-05, + "loss": 0.9853, + "step": 1444 + }, + { + "epoch": 0.24, + "learning_rate": 1.7729028006611908e-05, + "loss": 0.9302, + "step": 1445 + }, + { + "epoch": 0.24, + "learning_rate": 1.7725581624985073e-05, + "loss": 0.9813, + "step": 1446 + }, + { + "epoch": 0.24, + "learning_rate": 1.7722132965776035e-05, + "loss": 0.9321, + "step": 1447 + }, + { + "epoch": 0.24, + "learning_rate": 1.7718682030001485e-05, + "loss": 0.978, + "step": 1448 + }, + { + "epoch": 0.24, + "learning_rate": 1.77152288186788e-05, + "loss": 1.0304, + "step": 1449 + }, + { + "epoch": 0.24, + "learning_rate": 1.7711773332826023e-05, + "loss": 0.945, + "step": 1450 + }, + { + "epoch": 0.24, + "learning_rate": 1.770831557346187e-05, + "loss": 1.0084, + "step": 1451 + }, + { + "epoch": 0.24, + "learning_rate": 1.7704855541605715e-05, + "loss": 0.9685, + "step": 1452 + }, + { + "epoch": 0.24, + "learning_rate": 1.7701393238277626e-05, + "loss": 1.027, + "step": 1453 + }, + { + "epoch": 0.24, + "learning_rate": 1.7697928664498308e-05, + "loss": 0.9741, + "step": 1454 + }, + { + "epoch": 0.24, + "learning_rate": 1.769446182128917e-05, + "loss": 0.9609, + "step": 1455 + }, + { + "epoch": 0.24, + "learning_rate": 1.769099270967227e-05, + "loss": 0.9568, + "step": 1456 + }, + { + "epoch": 0.24, + "learning_rate": 1.7687521330670333e-05, + "loss": 0.9866, + "step": 1457 + }, + { + "epoch": 0.24, + "learning_rate": 1.768404768530676e-05, + "loss": 0.971, + "step": 1458 + }, + { + "epoch": 0.24, + "learning_rate": 1.768057177460562e-05, + "loss": 0.9849, + "step": 1459 + }, + { + "epoch": 0.24, + "learning_rate": 1.7677093599591643e-05, + "loss": 1.0078, + "step": 1460 + }, + { + "epoch": 0.24, + "learning_rate": 1.7673613161290237e-05, + "loss": 1.0142, + "step": 1461 + }, + { + "epoch": 0.25, + "learning_rate": 1.7670130460727465e-05, + "loss": 0.9988, + "step": 1462 + }, + { + "epoch": 0.25, + "learning_rate": 1.7666645498930074e-05, + "loss": 0.9576, + "step": 1463 + }, + { + "epoch": 0.25, + "learning_rate": 1.7663158276925458e-05, + "loss": 0.995, + "step": 1464 + }, + { + "epoch": 0.25, + "learning_rate": 1.765966879574169e-05, + "loss": 0.9757, + "step": 1465 + }, + { + "epoch": 0.25, + "learning_rate": 1.7656177056407508e-05, + "loss": 0.9653, + "step": 1466 + }, + { + "epoch": 0.25, + "learning_rate": 1.7652683059952306e-05, + "loss": 0.9941, + "step": 1467 + }, + { + "epoch": 0.25, + "learning_rate": 1.7649186807406153e-05, + "loss": 0.966, + "step": 1468 + }, + { + "epoch": 0.25, + "learning_rate": 1.7645688299799787e-05, + "loss": 0.9669, + "step": 1469 + }, + { + "epoch": 0.25, + "learning_rate": 
1.7642187538164595e-05, + "loss": 0.9751, + "step": 1470 + }, + { + "epoch": 0.25, + "learning_rate": 1.763868452353265e-05, + "loss": 1.008, + "step": 1471 + }, + { + "epoch": 0.25, + "learning_rate": 1.763517925693667e-05, + "loss": 0.9806, + "step": 1472 + }, + { + "epoch": 0.25, + "learning_rate": 1.7631671739410042e-05, + "loss": 0.9733, + "step": 1473 + }, + { + "epoch": 0.25, + "learning_rate": 1.7628161971986822e-05, + "loss": 0.9582, + "step": 1474 + }, + { + "epoch": 0.25, + "learning_rate": 1.7624649955701726e-05, + "loss": 0.9172, + "step": 1475 + }, + { + "epoch": 0.25, + "learning_rate": 1.7621135691590132e-05, + "loss": 0.9108, + "step": 1476 + }, + { + "epoch": 0.25, + "learning_rate": 1.7617619180688087e-05, + "loss": 0.9838, + "step": 1477 + }, + { + "epoch": 0.25, + "learning_rate": 1.7614100424032284e-05, + "loss": 1.0691, + "step": 1478 + }, + { + "epoch": 0.25, + "learning_rate": 1.7610579422660103e-05, + "loss": 1.0068, + "step": 1479 + }, + { + "epoch": 0.25, + "learning_rate": 1.760705617760956e-05, + "loss": 0.9162, + "step": 1480 + }, + { + "epoch": 0.25, + "learning_rate": 1.760353068991935e-05, + "loss": 1.0248, + "step": 1481 + }, + { + "epoch": 0.25, + "learning_rate": 1.7600002960628827e-05, + "loss": 1.0123, + "step": 1482 + }, + { + "epoch": 0.25, + "learning_rate": 1.7596472990777996e-05, + "loss": 0.929, + "step": 1483 + }, + { + "epoch": 0.25, + "learning_rate": 1.7592940781407533e-05, + "loss": 0.9725, + "step": 1484 + }, + { + "epoch": 0.25, + "learning_rate": 1.758940633355877e-05, + "loss": 0.9396, + "step": 1485 + }, + { + "epoch": 0.25, + "learning_rate": 1.7585869648273697e-05, + "loss": 0.9962, + "step": 1486 + }, + { + "epoch": 0.25, + "learning_rate": 1.7582330726594964e-05, + "loss": 1.0007, + "step": 1487 + }, + { + "epoch": 0.25, + "learning_rate": 1.757878956956589e-05, + "loss": 0.9841, + "step": 1488 + }, + { + "epoch": 0.25, + "learning_rate": 1.757524617823044e-05, + "loss": 1.0183, + "step": 1489 + }, + { + "epoch": 0.25, + "learning_rate": 1.7571700553633236e-05, + "loss": 0.9749, + "step": 1490 + }, + { + "epoch": 0.25, + "learning_rate": 1.756815269681958e-05, + "loss": 0.985, + "step": 1491 + }, + { + "epoch": 0.25, + "learning_rate": 1.7564602608835407e-05, + "loss": 0.9864, + "step": 1492 + }, + { + "epoch": 0.25, + "learning_rate": 1.7561050290727322e-05, + "loss": 0.9793, + "step": 1493 + }, + { + "epoch": 0.25, + "learning_rate": 1.7557495743542586e-05, + "loss": 1.0371, + "step": 1494 + }, + { + "epoch": 0.25, + "learning_rate": 1.7553938968329114e-05, + "loss": 1.0098, + "step": 1495 + }, + { + "epoch": 0.25, + "learning_rate": 1.7550379966135486e-05, + "loss": 0.9913, + "step": 1496 + }, + { + "epoch": 0.25, + "learning_rate": 1.754681873801093e-05, + "loss": 0.9899, + "step": 1497 + }, + { + "epoch": 0.25, + "learning_rate": 1.7543255285005335e-05, + "loss": 0.9915, + "step": 1498 + }, + { + "epoch": 0.25, + "learning_rate": 1.753968960816924e-05, + "loss": 0.3454, + "step": 1499 + }, + { + "epoch": 0.25, + "learning_rate": 1.7536121708553845e-05, + "loss": 0.9184, + "step": 1500 + }, + { + "epoch": 0.25, + "learning_rate": 1.7532551587211006e-05, + "loss": 0.9724, + "step": 1501 + }, + { + "epoch": 0.25, + "learning_rate": 1.7528979245193233e-05, + "loss": 0.9086, + "step": 1502 + }, + { + "epoch": 0.25, + "learning_rate": 1.752540468355369e-05, + "loss": 0.9582, + "step": 1503 + }, + { + "epoch": 0.25, + "learning_rate": 1.752182790334619e-05, + "loss": 0.9828, + "step": 1504 + }, + { + "epoch": 0.25, + "learning_rate": 
1.7518248905625214e-05, + "loss": 1.0149, + "step": 1505 + }, + { + "epoch": 0.25, + "learning_rate": 1.7514667691445873e-05, + "loss": 0.9615, + "step": 1506 + }, + { + "epoch": 0.25, + "learning_rate": 1.7511084261863965e-05, + "loss": 1.0001, + "step": 1507 + }, + { + "epoch": 0.25, + "learning_rate": 1.750749861793591e-05, + "loss": 0.9213, + "step": 1508 + }, + { + "epoch": 0.25, + "learning_rate": 1.7503910760718797e-05, + "loss": 0.964, + "step": 1509 + }, + { + "epoch": 0.25, + "learning_rate": 1.7500320691270365e-05, + "loss": 0.9518, + "step": 1510 + }, + { + "epoch": 0.25, + "learning_rate": 1.7496728410649e-05, + "loss": 0.9949, + "step": 1511 + }, + { + "epoch": 0.25, + "learning_rate": 1.749313391991375e-05, + "loss": 0.9772, + "step": 1512 + }, + { + "epoch": 0.25, + "learning_rate": 1.74895372201243e-05, + "loss": 0.9434, + "step": 1513 + }, + { + "epoch": 0.25, + "learning_rate": 1.7485938312341003e-05, + "loss": 0.9172, + "step": 1514 + }, + { + "epoch": 0.25, + "learning_rate": 1.7482337197624853e-05, + "loss": 0.9773, + "step": 1515 + }, + { + "epoch": 0.25, + "learning_rate": 1.747873387703749e-05, + "loss": 1.0602, + "step": 1516 + }, + { + "epoch": 0.25, + "learning_rate": 1.7475128351641216e-05, + "loss": 0.9993, + "step": 1517 + }, + { + "epoch": 0.25, + "learning_rate": 1.7471520622498982e-05, + "loss": 0.374, + "step": 1518 + }, + { + "epoch": 0.25, + "learning_rate": 1.7467910690674372e-05, + "loss": 1.0166, + "step": 1519 + }, + { + "epoch": 0.25, + "learning_rate": 1.7464298557231642e-05, + "loss": 0.9271, + "step": 1520 + }, + { + "epoch": 0.25, + "learning_rate": 1.746068422323568e-05, + "loss": 0.3588, + "step": 1521 + }, + { + "epoch": 0.26, + "learning_rate": 1.7457067689752033e-05, + "loss": 0.3371, + "step": 1522 + }, + { + "epoch": 0.26, + "learning_rate": 1.7453448957846896e-05, + "loss": 0.9935, + "step": 1523 + }, + { + "epoch": 0.26, + "learning_rate": 1.7449828028587105e-05, + "loss": 0.9906, + "step": 1524 + }, + { + "epoch": 0.26, + "learning_rate": 1.7446204903040148e-05, + "loss": 0.9644, + "step": 1525 + }, + { + "epoch": 0.26, + "learning_rate": 1.744257958227416e-05, + "loss": 0.9089, + "step": 1526 + }, + { + "epoch": 0.26, + "learning_rate": 1.743895206735792e-05, + "loss": 0.3412, + "step": 1527 + }, + { + "epoch": 0.26, + "learning_rate": 1.7435322359360866e-05, + "loss": 0.9591, + "step": 1528 + }, + { + "epoch": 0.26, + "learning_rate": 1.743169045935307e-05, + "loss": 0.9573, + "step": 1529 + }, + { + "epoch": 0.26, + "learning_rate": 1.7428056368405247e-05, + "loss": 0.9361, + "step": 1530 + }, + { + "epoch": 0.26, + "learning_rate": 1.7424420087588777e-05, + "loss": 1.0099, + "step": 1531 + }, + { + "epoch": 0.26, + "learning_rate": 1.7420781617975667e-05, + "loss": 0.9612, + "step": 1532 + }, + { + "epoch": 0.26, + "learning_rate": 1.7417140960638574e-05, + "loss": 0.9525, + "step": 1533 + }, + { + "epoch": 0.26, + "learning_rate": 1.7413498116650806e-05, + "loss": 0.9851, + "step": 1534 + }, + { + "epoch": 0.26, + "learning_rate": 1.7409853087086312e-05, + "loss": 0.9694, + "step": 1535 + }, + { + "epoch": 0.26, + "learning_rate": 1.7406205873019684e-05, + "loss": 0.9144, + "step": 1536 + }, + { + "epoch": 0.26, + "learning_rate": 1.7402556475526155e-05, + "loss": 0.9844, + "step": 1537 + }, + { + "epoch": 0.26, + "learning_rate": 1.739890489568161e-05, + "loss": 0.9599, + "step": 1538 + }, + { + "epoch": 0.26, + "learning_rate": 1.7395251134562566e-05, + "loss": 0.9297, + "step": 1539 + }, + { + "epoch": 0.26, + "learning_rate": 
1.7391595193246197e-05, + "loss": 0.9972, + "step": 1540 + }, + { + "epoch": 0.26, + "learning_rate": 1.7387937072810313e-05, + "loss": 0.8822, + "step": 1541 + }, + { + "epoch": 0.26, + "learning_rate": 1.7384276774333363e-05, + "loss": 0.9451, + "step": 1542 + }, + { + "epoch": 0.26, + "learning_rate": 1.7380614298894443e-05, + "loss": 0.3437, + "step": 1543 + }, + { + "epoch": 0.26, + "learning_rate": 1.7376949647573287e-05, + "loss": 0.9017, + "step": 1544 + }, + { + "epoch": 0.26, + "learning_rate": 1.737328282145027e-05, + "loss": 0.975, + "step": 1545 + }, + { + "epoch": 0.26, + "learning_rate": 1.736961382160642e-05, + "loss": 0.3593, + "step": 1546 + }, + { + "epoch": 0.26, + "learning_rate": 1.736594264912339e-05, + "loss": 0.9139, + "step": 1547 + }, + { + "epoch": 0.26, + "learning_rate": 1.736226930508348e-05, + "loss": 0.9732, + "step": 1548 + }, + { + "epoch": 0.26, + "learning_rate": 1.735859379056963e-05, + "loss": 0.9338, + "step": 1549 + }, + { + "epoch": 0.26, + "learning_rate": 1.7354916106665422e-05, + "loss": 0.9211, + "step": 1550 + }, + { + "epoch": 0.26, + "learning_rate": 1.7351236254455077e-05, + "loss": 0.9658, + "step": 1551 + }, + { + "epoch": 0.26, + "learning_rate": 1.7347554235023447e-05, + "loss": 0.9745, + "step": 1552 + }, + { + "epoch": 0.26, + "learning_rate": 1.734387004945604e-05, + "loss": 0.9691, + "step": 1553 + }, + { + "epoch": 0.26, + "learning_rate": 1.734018369883898e-05, + "loss": 0.8937, + "step": 1554 + }, + { + "epoch": 0.26, + "learning_rate": 1.7336495184259057e-05, + "loss": 0.9597, + "step": 1555 + }, + { + "epoch": 0.26, + "learning_rate": 1.733280450680367e-05, + "loss": 1.003, + "step": 1556 + }, + { + "epoch": 0.26, + "learning_rate": 1.7329111667560875e-05, + "loss": 0.9012, + "step": 1557 + }, + { + "epoch": 0.26, + "learning_rate": 1.732541666761936e-05, + "loss": 0.9645, + "step": 1558 + }, + { + "epoch": 0.26, + "learning_rate": 1.732171950806845e-05, + "loss": 0.9214, + "step": 1559 + }, + { + "epoch": 0.26, + "learning_rate": 1.7318020189998103e-05, + "loss": 0.9487, + "step": 1560 + }, + { + "epoch": 0.26, + "learning_rate": 1.7314318714498922e-05, + "loss": 0.9985, + "step": 1561 + }, + { + "epoch": 0.26, + "learning_rate": 1.7310615082662133e-05, + "loss": 0.9517, + "step": 1562 + }, + { + "epoch": 0.26, + "learning_rate": 1.730690929557961e-05, + "loss": 0.8971, + "step": 1563 + }, + { + "epoch": 0.26, + "learning_rate": 1.730320135434386e-05, + "loss": 0.9942, + "step": 1564 + }, + { + "epoch": 0.26, + "learning_rate": 1.729949126004802e-05, + "loss": 0.9574, + "step": 1565 + }, + { + "epoch": 0.26, + "learning_rate": 1.7295779013785865e-05, + "loss": 0.9676, + "step": 1566 + }, + { + "epoch": 0.26, + "learning_rate": 1.72920646166518e-05, + "loss": 0.997, + "step": 1567 + }, + { + "epoch": 0.26, + "learning_rate": 1.7288348069740878e-05, + "loss": 1.0261, + "step": 1568 + }, + { + "epoch": 0.26, + "learning_rate": 1.7284629374148764e-05, + "loss": 0.9811, + "step": 1569 + }, + { + "epoch": 0.26, + "learning_rate": 1.728090853097178e-05, + "loss": 0.942, + "step": 1570 + }, + { + "epoch": 0.26, + "learning_rate": 1.727718554130686e-05, + "loss": 0.9684, + "step": 1571 + }, + { + "epoch": 0.26, + "learning_rate": 1.7273460406251584e-05, + "loss": 0.9522, + "step": 1572 + }, + { + "epoch": 0.26, + "learning_rate": 1.7269733126904162e-05, + "loss": 0.9383, + "step": 1573 + }, + { + "epoch": 0.26, + "learning_rate": 1.7266003704363432e-05, + "loss": 0.9605, + "step": 1574 + }, + { + "epoch": 0.26, + "learning_rate": 
1.726227213972887e-05, + "loss": 0.9818, + "step": 1575 + }, + { + "epoch": 0.26, + "learning_rate": 1.725853843410058e-05, + "loss": 0.984, + "step": 1576 + }, + { + "epoch": 0.26, + "learning_rate": 1.725480258857929e-05, + "loss": 0.9654, + "step": 1577 + }, + { + "epoch": 0.26, + "learning_rate": 1.7251064604266376e-05, + "loss": 0.9579, + "step": 1578 + }, + { + "epoch": 0.26, + "learning_rate": 1.7247324482263832e-05, + "loss": 0.8993, + "step": 1579 + }, + { + "epoch": 0.26, + "learning_rate": 1.7243582223674286e-05, + "loss": 0.9519, + "step": 1580 + }, + { + "epoch": 0.27, + "learning_rate": 1.723983782960099e-05, + "loss": 0.9682, + "step": 1581 + }, + { + "epoch": 0.27, + "learning_rate": 1.7236091301147834e-05, + "loss": 0.9424, + "step": 1582 + }, + { + "epoch": 0.27, + "learning_rate": 1.7232342639419333e-05, + "loss": 0.9582, + "step": 1583 + }, + { + "epoch": 0.27, + "learning_rate": 1.7228591845520633e-05, + "loss": 0.9727, + "step": 1584 + }, + { + "epoch": 0.27, + "learning_rate": 1.7224838920557506e-05, + "loss": 0.3763, + "step": 1585 + }, + { + "epoch": 0.27, + "learning_rate": 1.7221083865636356e-05, + "loss": 0.3552, + "step": 1586 + }, + { + "epoch": 0.27, + "learning_rate": 1.721732668186421e-05, + "loss": 0.9481, + "step": 1587 + }, + { + "epoch": 0.27, + "learning_rate": 1.721356737034872e-05, + "loss": 0.9683, + "step": 1588 + }, + { + "epoch": 0.27, + "learning_rate": 1.720980593219818e-05, + "loss": 0.9672, + "step": 1589 + }, + { + "epoch": 0.27, + "learning_rate": 1.7206042368521493e-05, + "loss": 1.0087, + "step": 1590 + }, + { + "epoch": 0.27, + "learning_rate": 1.7202276680428208e-05, + "loss": 0.9538, + "step": 1591 + }, + { + "epoch": 0.27, + "learning_rate": 1.7198508869028475e-05, + "loss": 0.9774, + "step": 1592 + }, + { + "epoch": 0.27, + "learning_rate": 1.7194738935433094e-05, + "loss": 0.9867, + "step": 1593 + }, + { + "epoch": 0.27, + "learning_rate": 1.719096688075348e-05, + "loss": 0.9636, + "step": 1594 + }, + { + "epoch": 0.27, + "learning_rate": 1.718719270610167e-05, + "loss": 0.8982, + "step": 1595 + }, + { + "epoch": 0.27, + "learning_rate": 1.7183416412590334e-05, + "loss": 0.9768, + "step": 1596 + }, + { + "epoch": 0.27, + "learning_rate": 1.717963800133276e-05, + "loss": 0.9742, + "step": 1597 + }, + { + "epoch": 0.27, + "learning_rate": 1.7175857473442864e-05, + "loss": 0.9961, + "step": 1598 + }, + { + "epoch": 0.27, + "learning_rate": 1.7172074830035185e-05, + "loss": 0.3765, + "step": 1599 + }, + { + "epoch": 0.27, + "learning_rate": 1.7168290072224886e-05, + "loss": 0.9647, + "step": 1600 + }, + { + "epoch": 0.27, + "learning_rate": 1.7164503201127753e-05, + "loss": 0.9212, + "step": 1601 + }, + { + "epoch": 0.27, + "learning_rate": 1.7160714217860196e-05, + "loss": 0.9336, + "step": 1602 + }, + { + "epoch": 0.27, + "learning_rate": 1.7156923123539245e-05, + "loss": 0.9579, + "step": 1603 + }, + { + "epoch": 0.27, + "learning_rate": 1.715312991928256e-05, + "loss": 0.9582, + "step": 1604 + }, + { + "epoch": 0.27, + "learning_rate": 1.714933460620841e-05, + "loss": 0.9404, + "step": 1605 + }, + { + "epoch": 0.27, + "learning_rate": 1.7145537185435694e-05, + "loss": 0.9191, + "step": 1606 + }, + { + "epoch": 0.27, + "learning_rate": 1.7141737658083936e-05, + "loss": 1.0616, + "step": 1607 + }, + { + "epoch": 0.27, + "learning_rate": 1.713793602527327e-05, + "loss": 0.977, + "step": 1608 + }, + { + "epoch": 0.27, + "learning_rate": 1.7134132288124464e-05, + "loss": 0.8857, + "step": 1609 + }, + { + "epoch": 0.27, + "learning_rate": 
1.7130326447758898e-05, + "loss": 0.9351, + "step": 1610 + }, + { + "epoch": 0.27, + "learning_rate": 1.7126518505298567e-05, + "loss": 0.9587, + "step": 1611 + }, + { + "epoch": 0.27, + "learning_rate": 1.71227084618661e-05, + "loss": 0.9684, + "step": 1612 + }, + { + "epoch": 0.27, + "learning_rate": 1.7118896318584733e-05, + "loss": 0.9479, + "step": 1613 + }, + { + "epoch": 0.27, + "learning_rate": 1.7115082076578327e-05, + "loss": 0.942, + "step": 1614 + }, + { + "epoch": 0.27, + "learning_rate": 1.711126573697136e-05, + "loss": 0.9773, + "step": 1615 + }, + { + "epoch": 0.27, + "learning_rate": 1.7107447300888932e-05, + "loss": 0.9203, + "step": 1616 + }, + { + "epoch": 0.27, + "learning_rate": 1.710362676945675e-05, + "loss": 0.3345, + "step": 1617 + }, + { + "epoch": 0.27, + "learning_rate": 1.709980414380116e-05, + "loss": 1.036, + "step": 1618 + }, + { + "epoch": 0.27, + "learning_rate": 1.7095979425049098e-05, + "loss": 0.9632, + "step": 1619 + }, + { + "epoch": 0.27, + "learning_rate": 1.7092152614328135e-05, + "loss": 0.9161, + "step": 1620 + }, + { + "epoch": 0.27, + "learning_rate": 1.708832371276646e-05, + "loss": 0.9847, + "step": 1621 + }, + { + "epoch": 0.27, + "learning_rate": 1.708449272149287e-05, + "loss": 0.9604, + "step": 1622 + }, + { + "epoch": 0.27, + "learning_rate": 1.7080659641636784e-05, + "loss": 0.92, + "step": 1623 + }, + { + "epoch": 0.27, + "learning_rate": 1.7076824474328226e-05, + "loss": 0.8939, + "step": 1624 + }, + { + "epoch": 0.27, + "learning_rate": 1.707298722069785e-05, + "loss": 0.3272, + "step": 1625 + }, + { + "epoch": 0.27, + "learning_rate": 1.706914788187692e-05, + "loss": 0.9382, + "step": 1626 + }, + { + "epoch": 0.27, + "learning_rate": 1.7065306458997305e-05, + "loss": 0.9698, + "step": 1627 + }, + { + "epoch": 0.27, + "learning_rate": 1.7061462953191504e-05, + "loss": 0.9096, + "step": 1628 + }, + { + "epoch": 0.27, + "learning_rate": 1.7057617365592624e-05, + "loss": 1.0126, + "step": 1629 + }, + { + "epoch": 0.27, + "learning_rate": 1.705376969733438e-05, + "loss": 0.9658, + "step": 1630 + }, + { + "epoch": 0.27, + "learning_rate": 1.7049919949551103e-05, + "loss": 0.9435, + "step": 1631 + }, + { + "epoch": 0.27, + "learning_rate": 1.704606812337774e-05, + "loss": 0.3357, + "step": 1632 + }, + { + "epoch": 0.27, + "learning_rate": 1.7042214219949854e-05, + "loss": 0.9163, + "step": 1633 + }, + { + "epoch": 0.27, + "learning_rate": 1.7038358240403615e-05, + "loss": 0.9625, + "step": 1634 + }, + { + "epoch": 0.27, + "learning_rate": 1.70345001858758e-05, + "loss": 0.9129, + "step": 1635 + }, + { + "epoch": 0.27, + "learning_rate": 1.7030640057503812e-05, + "loss": 0.9909, + "step": 1636 + }, + { + "epoch": 0.27, + "learning_rate": 1.7026777856425653e-05, + "loss": 0.9609, + "step": 1637 + }, + { + "epoch": 0.27, + "learning_rate": 1.702291358377994e-05, + "loss": 1.0422, + "step": 1638 + }, + { + "epoch": 0.27, + "learning_rate": 1.7019047240705902e-05, + "loss": 0.9601, + "step": 1639 + }, + { + "epoch": 0.27, + "learning_rate": 1.701517882834337e-05, + "loss": 0.907, + "step": 1640 + }, + { + "epoch": 0.28, + "learning_rate": 1.701130834783281e-05, + "loss": 0.9069, + "step": 1641 + }, + { + "epoch": 0.28, + "learning_rate": 1.7007435800315263e-05, + "loss": 0.9344, + "step": 1642 + }, + { + "epoch": 0.28, + "learning_rate": 1.7003561186932403e-05, + "loss": 1.017, + "step": 1643 + }, + { + "epoch": 0.28, + "learning_rate": 1.699968450882651e-05, + "loss": 0.9432, + "step": 1644 + }, + { + "epoch": 0.28, + "learning_rate": 
1.6995805767140465e-05, + "loss": 0.9888, + "step": 1645 + }, + { + "epoch": 0.28, + "learning_rate": 1.699192496301776e-05, + "loss": 0.8697, + "step": 1646 + }, + { + "epoch": 0.28, + "learning_rate": 1.69880420976025e-05, + "loss": 0.9514, + "step": 1647 + }, + { + "epoch": 0.28, + "learning_rate": 1.6984157172039393e-05, + "loss": 1.0071, + "step": 1648 + }, + { + "epoch": 0.28, + "learning_rate": 1.6980270187473757e-05, + "loss": 0.966, + "step": 1649 + }, + { + "epoch": 0.28, + "learning_rate": 1.6976381145051513e-05, + "loss": 0.9326, + "step": 1650 + }, + { + "epoch": 0.28, + "learning_rate": 1.697249004591919e-05, + "loss": 0.9445, + "step": 1651 + }, + { + "epoch": 0.28, + "learning_rate": 1.696859689122393e-05, + "loss": 1.0231, + "step": 1652 + }, + { + "epoch": 0.28, + "learning_rate": 1.6964701682113477e-05, + "loss": 0.9867, + "step": 1653 + }, + { + "epoch": 0.28, + "learning_rate": 1.6960804419736172e-05, + "loss": 0.949, + "step": 1654 + }, + { + "epoch": 0.28, + "learning_rate": 1.695690510524097e-05, + "loss": 0.9522, + "step": 1655 + }, + { + "epoch": 0.28, + "learning_rate": 1.6953003739777438e-05, + "loss": 0.9175, + "step": 1656 + }, + { + "epoch": 0.28, + "learning_rate": 1.6949100324495727e-05, + "loss": 0.9586, + "step": 1657 + }, + { + "epoch": 0.28, + "learning_rate": 1.6945194860546614e-05, + "loss": 0.9428, + "step": 1658 + }, + { + "epoch": 0.28, + "learning_rate": 1.6941287349081466e-05, + "loss": 1.0129, + "step": 1659 + }, + { + "epoch": 0.28, + "learning_rate": 1.6937377791252262e-05, + "loss": 1.0099, + "step": 1660 + }, + { + "epoch": 0.28, + "learning_rate": 1.6933466188211575e-05, + "loss": 0.9204, + "step": 1661 + }, + { + "epoch": 0.28, + "learning_rate": 1.6929552541112592e-05, + "loss": 1.0058, + "step": 1662 + }, + { + "epoch": 0.28, + "learning_rate": 1.6925636851109095e-05, + "loss": 0.9402, + "step": 1663 + }, + { + "epoch": 0.28, + "learning_rate": 1.692171911935547e-05, + "loss": 0.9328, + "step": 1664 + }, + { + "epoch": 0.28, + "learning_rate": 1.69177993470067e-05, + "loss": 0.9079, + "step": 1665 + }, + { + "epoch": 0.28, + "learning_rate": 1.6913877535218386e-05, + "loss": 0.9453, + "step": 1666 + }, + { + "epoch": 0.28, + "learning_rate": 1.6909953685146713e-05, + "loss": 0.94, + "step": 1667 + }, + { + "epoch": 0.28, + "learning_rate": 1.6906027797948473e-05, + "loss": 1.0224, + "step": 1668 + }, + { + "epoch": 0.28, + "learning_rate": 1.6902099874781058e-05, + "loss": 0.9641, + "step": 1669 + }, + { + "epoch": 0.28, + "learning_rate": 1.689816991680246e-05, + "loss": 0.9681, + "step": 1670 + }, + { + "epoch": 0.28, + "learning_rate": 1.689423792517128e-05, + "loss": 0.9808, + "step": 1671 + }, + { + "epoch": 0.28, + "learning_rate": 1.6890303901046695e-05, + "loss": 0.9642, + "step": 1672 + }, + { + "epoch": 0.28, + "learning_rate": 1.6886367845588507e-05, + "loss": 0.9921, + "step": 1673 + }, + { + "epoch": 0.28, + "learning_rate": 1.688242975995711e-05, + "loss": 0.956, + "step": 1674 + }, + { + "epoch": 0.28, + "learning_rate": 1.687848964531348e-05, + "loss": 0.9801, + "step": 1675 + }, + { + "epoch": 0.28, + "learning_rate": 1.6874547502819213e-05, + "loss": 0.9585, + "step": 1676 + }, + { + "epoch": 0.28, + "learning_rate": 1.6870603333636495e-05, + "loss": 0.8713, + "step": 1677 + }, + { + "epoch": 0.28, + "learning_rate": 1.6866657138928106e-05, + "loss": 0.9747, + "step": 1678 + }, + { + "epoch": 0.28, + "learning_rate": 1.6862708919857417e-05, + "loss": 0.9787, + "step": 1679 + }, + { + "epoch": 0.28, + "learning_rate": 
1.685875867758842e-05, + "loss": 0.9887, + "step": 1680 + }, + { + "epoch": 0.28, + "learning_rate": 1.6854806413285674e-05, + "loss": 0.9577, + "step": 1681 + }, + { + "epoch": 0.28, + "learning_rate": 1.6850852128114357e-05, + "loss": 0.8925, + "step": 1682 + }, + { + "epoch": 0.28, + "learning_rate": 1.6846895823240228e-05, + "loss": 0.9502, + "step": 1683 + }, + { + "epoch": 0.28, + "learning_rate": 1.6842937499829652e-05, + "loss": 0.9605, + "step": 1684 + }, + { + "epoch": 0.28, + "learning_rate": 1.6838977159049575e-05, + "loss": 0.9004, + "step": 1685 + }, + { + "epoch": 0.28, + "learning_rate": 1.683501480206756e-05, + "loss": 1.0072, + "step": 1686 + }, + { + "epoch": 0.28, + "learning_rate": 1.683105043005174e-05, + "loss": 0.9037, + "step": 1687 + }, + { + "epoch": 0.28, + "learning_rate": 1.6827084044170857e-05, + "loss": 0.8911, + "step": 1688 + }, + { + "epoch": 0.28, + "learning_rate": 1.6823115645594244e-05, + "loss": 0.9011, + "step": 1689 + }, + { + "epoch": 0.28, + "learning_rate": 1.681914523549183e-05, + "loss": 0.9616, + "step": 1690 + }, + { + "epoch": 0.28, + "learning_rate": 1.6815172815034128e-05, + "loss": 0.9921, + "step": 1691 + }, + { + "epoch": 0.28, + "learning_rate": 1.6811198385392246e-05, + "loss": 0.9484, + "step": 1692 + }, + { + "epoch": 0.28, + "learning_rate": 1.6807221947737895e-05, + "loss": 0.8953, + "step": 1693 + }, + { + "epoch": 0.28, + "learning_rate": 1.6803243503243368e-05, + "loss": 0.9667, + "step": 1694 + }, + { + "epoch": 0.28, + "learning_rate": 1.6799263053081548e-05, + "loss": 0.9856, + "step": 1695 + }, + { + "epoch": 0.28, + "learning_rate": 1.6795280598425918e-05, + "loss": 0.9723, + "step": 1696 + }, + { + "epoch": 0.28, + "learning_rate": 1.6791296140450547e-05, + "loss": 0.9898, + "step": 1697 + }, + { + "epoch": 0.28, + "learning_rate": 1.6787309680330093e-05, + "loss": 0.9839, + "step": 1698 + }, + { + "epoch": 0.28, + "learning_rate": 1.6783321219239808e-05, + "loss": 0.9403, + "step": 1699 + }, + { + "epoch": 0.28, + "learning_rate": 1.677933075835553e-05, + "loss": 0.9561, + "step": 1700 + }, + { + "epoch": 0.29, + "learning_rate": 1.6775338298853687e-05, + "loss": 0.9461, + "step": 1701 + }, + { + "epoch": 0.29, + "learning_rate": 1.6771343841911302e-05, + "loss": 0.9229, + "step": 1702 + }, + { + "epoch": 0.29, + "learning_rate": 1.676734738870598e-05, + "loss": 0.9584, + "step": 1703 + }, + { + "epoch": 0.29, + "learning_rate": 1.676334894041592e-05, + "loss": 0.9683, + "step": 1704 + }, + { + "epoch": 0.29, + "learning_rate": 1.67593484982199e-05, + "loss": 0.9516, + "step": 1705 + }, + { + "epoch": 0.29, + "learning_rate": 1.6755346063297303e-05, + "loss": 1.0207, + "step": 1706 + }, + { + "epoch": 0.29, + "learning_rate": 1.6751341636828076e-05, + "loss": 1.0289, + "step": 1707 + }, + { + "epoch": 0.29, + "learning_rate": 1.6747335219992777e-05, + "loss": 0.9187, + "step": 1708 + }, + { + "epoch": 0.29, + "learning_rate": 1.674332681397253e-05, + "loss": 1.0174, + "step": 1709 + }, + { + "epoch": 0.29, + "learning_rate": 1.673931641994906e-05, + "loss": 0.8875, + "step": 1710 + }, + { + "epoch": 0.29, + "learning_rate": 1.6735304039104675e-05, + "loss": 0.8931, + "step": 1711 + }, + { + "epoch": 0.29, + "learning_rate": 1.6731289672622263e-05, + "loss": 0.9773, + "step": 1712 + }, + { + "epoch": 0.29, + "learning_rate": 1.6727273321685303e-05, + "loss": 0.9347, + "step": 1713 + }, + { + "epoch": 0.29, + "learning_rate": 1.6723254987477858e-05, + "loss": 1.0346, + "step": 1714 + }, + { + "epoch": 0.29, + 
"learning_rate": 1.671923467118457e-05, + "loss": 1.017, + "step": 1715 + }, + { + "epoch": 0.29, + "learning_rate": 1.6715212373990676e-05, + "loss": 0.9319, + "step": 1716 + }, + { + "epoch": 0.29, + "learning_rate": 1.6711188097081987e-05, + "loss": 0.9164, + "step": 1717 + }, + { + "epoch": 0.29, + "learning_rate": 1.6707161841644908e-05, + "loss": 0.966, + "step": 1718 + }, + { + "epoch": 0.29, + "learning_rate": 1.6703133608866415e-05, + "loss": 0.9559, + "step": 1719 + }, + { + "epoch": 0.29, + "learning_rate": 1.6699103399934076e-05, + "loss": 0.9051, + "step": 1720 + }, + { + "epoch": 0.29, + "learning_rate": 1.6695071216036037e-05, + "loss": 0.9851, + "step": 1721 + }, + { + "epoch": 0.29, + "learning_rate": 1.6691037058361032e-05, + "loss": 0.9496, + "step": 1722 + }, + { + "epoch": 0.29, + "learning_rate": 1.6687000928098366e-05, + "loss": 0.994, + "step": 1723 + }, + { + "epoch": 0.29, + "learning_rate": 1.668296282643794e-05, + "loss": 0.9455, + "step": 1724 + }, + { + "epoch": 0.29, + "learning_rate": 1.667892275457022e-05, + "loss": 0.9694, + "step": 1725 + }, + { + "epoch": 0.29, + "learning_rate": 1.667488071368627e-05, + "loss": 0.9679, + "step": 1726 + }, + { + "epoch": 0.29, + "learning_rate": 1.667083670497772e-05, + "loss": 1.0566, + "step": 1727 + }, + { + "epoch": 0.29, + "learning_rate": 1.666679072963679e-05, + "loss": 0.9542, + "step": 1728 + }, + { + "epoch": 0.29, + "learning_rate": 1.6662742788856275e-05, + "loss": 0.898, + "step": 1729 + }, + { + "epoch": 0.29, + "learning_rate": 1.6658692883829548e-05, + "loss": 0.9792, + "step": 1730 + }, + { + "epoch": 0.29, + "learning_rate": 1.6654641015750564e-05, + "loss": 0.9149, + "step": 1731 + }, + { + "epoch": 0.29, + "learning_rate": 1.665058718581386e-05, + "loss": 0.9495, + "step": 1732 + }, + { + "epoch": 0.29, + "learning_rate": 1.6646531395214537e-05, + "loss": 0.9579, + "step": 1733 + }, + { + "epoch": 0.29, + "learning_rate": 1.6642473645148297e-05, + "loss": 0.9213, + "step": 1734 + }, + { + "epoch": 0.29, + "learning_rate": 1.6638413936811398e-05, + "loss": 0.3398, + "step": 1735 + }, + { + "epoch": 0.29, + "learning_rate": 1.6634352271400693e-05, + "loss": 0.9423, + "step": 1736 + }, + { + "epoch": 0.29, + "learning_rate": 1.66302886501136e-05, + "loss": 0.8848, + "step": 1737 + }, + { + "epoch": 0.29, + "learning_rate": 1.6626223074148105e-05, + "loss": 0.985, + "step": 1738 + }, + { + "epoch": 0.29, + "learning_rate": 1.6622155544702804e-05, + "loss": 0.3724, + "step": 1739 + }, + { + "epoch": 0.29, + "learning_rate": 1.661808606297683e-05, + "loss": 0.9327, + "step": 1740 + }, + { + "epoch": 0.29, + "learning_rate": 1.6614014630169916e-05, + "loss": 0.9966, + "step": 1741 + }, + { + "epoch": 0.29, + "learning_rate": 1.660994124748236e-05, + "loss": 0.9721, + "step": 1742 + }, + { + "epoch": 0.29, + "learning_rate": 1.6605865916115044e-05, + "loss": 0.9238, + "step": 1743 + }, + { + "epoch": 0.29, + "learning_rate": 1.660178863726941e-05, + "loss": 0.9772, + "step": 1744 + }, + { + "epoch": 0.29, + "learning_rate": 1.659770941214749e-05, + "loss": 1.0167, + "step": 1745 + }, + { + "epoch": 0.29, + "learning_rate": 1.6593628241951875e-05, + "loss": 0.9085, + "step": 1746 + }, + { + "epoch": 0.29, + "learning_rate": 1.658954512788574e-05, + "loss": 0.9217, + "step": 1747 + }, + { + "epoch": 0.29, + "learning_rate": 1.658546007115283e-05, + "loss": 0.9958, + "step": 1748 + }, + { + "epoch": 0.29, + "learning_rate": 1.658137307295746e-05, + "loss": 0.3533, + "step": 1749 + }, + { + "epoch": 0.29, + 
"learning_rate": 1.6577284134504527e-05, + "loss": 0.9719, + "step": 1750 + }, + { + "epoch": 0.29, + "learning_rate": 1.6573193256999486e-05, + "loss": 0.9305, + "step": 1751 + }, + { + "epoch": 0.29, + "learning_rate": 1.6569100441648373e-05, + "loss": 0.9284, + "step": 1752 + }, + { + "epoch": 0.29, + "learning_rate": 1.6565005689657792e-05, + "loss": 1.0139, + "step": 1753 + }, + { + "epoch": 0.29, + "learning_rate": 1.6560909002234917e-05, + "loss": 0.8721, + "step": 1754 + }, + { + "epoch": 0.29, + "learning_rate": 1.6556810380587497e-05, + "loss": 1.0082, + "step": 1755 + }, + { + "epoch": 0.29, + "learning_rate": 1.6552709825923846e-05, + "loss": 0.9091, + "step": 1756 + }, + { + "epoch": 0.29, + "learning_rate": 1.6548607339452853e-05, + "loss": 0.9714, + "step": 1757 + }, + { + "epoch": 0.29, + "learning_rate": 1.654450292238397e-05, + "loss": 1.0059, + "step": 1758 + }, + { + "epoch": 0.29, + "learning_rate": 1.654039657592723e-05, + "loss": 0.9055, + "step": 1759 + }, + { + "epoch": 0.3, + "learning_rate": 1.6536288301293218e-05, + "loss": 0.981, + "step": 1760 + }, + { + "epoch": 0.3, + "learning_rate": 1.65321780996931e-05, + "loss": 0.928, + "step": 1761 + }, + { + "epoch": 0.3, + "learning_rate": 1.6528065972338607e-05, + "loss": 0.9819, + "step": 1762 + }, + { + "epoch": 0.3, + "learning_rate": 1.6523951920442032e-05, + "loss": 0.9029, + "step": 1763 + }, + { + "epoch": 0.3, + "learning_rate": 1.6519835945216253e-05, + "loss": 0.9378, + "step": 1764 + }, + { + "epoch": 0.3, + "learning_rate": 1.651571804787469e-05, + "loss": 0.9439, + "step": 1765 + }, + { + "epoch": 0.3, + "learning_rate": 1.6511598229631344e-05, + "loss": 0.3229, + "step": 1766 + }, + { + "epoch": 0.3, + "learning_rate": 1.6507476491700788e-05, + "loss": 0.3442, + "step": 1767 + }, + { + "epoch": 0.3, + "learning_rate": 1.6503352835298147e-05, + "loss": 0.9555, + "step": 1768 + }, + { + "epoch": 0.3, + "learning_rate": 1.6499227261639116e-05, + "loss": 0.948, + "step": 1769 + }, + { + "epoch": 0.3, + "learning_rate": 1.649509977193996e-05, + "loss": 1.0448, + "step": 1770 + }, + { + "epoch": 0.3, + "learning_rate": 1.6490970367417515e-05, + "loss": 0.9558, + "step": 1771 + }, + { + "epoch": 0.3, + "learning_rate": 1.648683904928916e-05, + "loss": 0.8792, + "step": 1772 + }, + { + "epoch": 0.3, + "learning_rate": 1.6482705818772853e-05, + "loss": 1.0149, + "step": 1773 + }, + { + "epoch": 0.3, + "learning_rate": 1.647857067708712e-05, + "loss": 0.9475, + "step": 1774 + }, + { + "epoch": 0.3, + "learning_rate": 1.6474433625451035e-05, + "loss": 0.9052, + "step": 1775 + }, + { + "epoch": 0.3, + "learning_rate": 1.6470294665084254e-05, + "loss": 0.9054, + "step": 1776 + }, + { + "epoch": 0.3, + "learning_rate": 1.6466153797206973e-05, + "loss": 0.9744, + "step": 1777 + }, + { + "epoch": 0.3, + "learning_rate": 1.6462011023039977e-05, + "loss": 0.9955, + "step": 1778 + }, + { + "epoch": 0.3, + "learning_rate": 1.6457866343804592e-05, + "loss": 1.0248, + "step": 1779 + }, + { + "epoch": 0.3, + "learning_rate": 1.645371976072271e-05, + "loss": 0.9848, + "step": 1780 + }, + { + "epoch": 0.3, + "learning_rate": 1.6449571275016795e-05, + "loss": 0.9869, + "step": 1781 + }, + { + "epoch": 0.3, + "learning_rate": 1.6445420887909858e-05, + "loss": 0.9375, + "step": 1782 + }, + { + "epoch": 0.3, + "learning_rate": 1.6441268600625476e-05, + "loss": 0.9852, + "step": 1783 + }, + { + "epoch": 0.3, + "learning_rate": 1.643711441438779e-05, + "loss": 1.0049, + "step": 1784 + }, + { + "epoch": 0.3, + "learning_rate": 
1.6432958330421497e-05, + "loss": 0.9388, + "step": 1785 + }, + { + "epoch": 0.3, + "learning_rate": 1.6428800349951853e-05, + "loss": 1.0089, + "step": 1786 + }, + { + "epoch": 0.3, + "learning_rate": 1.6424640474204675e-05, + "loss": 0.9816, + "step": 1787 + }, + { + "epoch": 0.3, + "learning_rate": 1.6420478704406337e-05, + "loss": 1.0043, + "step": 1788 + }, + { + "epoch": 0.3, + "learning_rate": 1.641631504178377e-05, + "loss": 0.9601, + "step": 1789 + }, + { + "epoch": 0.3, + "learning_rate": 1.6412149487564473e-05, + "loss": 0.9055, + "step": 1790 + }, + { + "epoch": 0.3, + "learning_rate": 1.6407982042976483e-05, + "loss": 0.9125, + "step": 1791 + }, + { + "epoch": 0.3, + "learning_rate": 1.640381270924842e-05, + "loss": 1.0183, + "step": 1792 + }, + { + "epoch": 0.3, + "learning_rate": 1.639964148760943e-05, + "loss": 0.948, + "step": 1793 + }, + { + "epoch": 0.3, + "learning_rate": 1.6395468379289253e-05, + "loss": 0.943, + "step": 1794 + }, + { + "epoch": 0.3, + "learning_rate": 1.6391293385518146e-05, + "loss": 1.0017, + "step": 1795 + }, + { + "epoch": 0.3, + "learning_rate": 1.6387116507526958e-05, + "loss": 1.0416, + "step": 1796 + }, + { + "epoch": 0.3, + "learning_rate": 1.6382937746547064e-05, + "loss": 0.9828, + "step": 1797 + }, + { + "epoch": 0.3, + "learning_rate": 1.637875710381041e-05, + "loss": 0.9286, + "step": 1798 + }, + { + "epoch": 0.3, + "learning_rate": 1.637457458054949e-05, + "loss": 0.9831, + "step": 1799 + }, + { + "epoch": 0.3, + "learning_rate": 1.637039017799736e-05, + "loss": 0.9895, + "step": 1800 + }, + { + "epoch": 0.3, + "learning_rate": 1.636620389738763e-05, + "loss": 0.9481, + "step": 1801 + }, + { + "epoch": 0.3, + "learning_rate": 1.6362015739954452e-05, + "loss": 0.9323, + "step": 1802 + }, + { + "epoch": 0.3, + "learning_rate": 1.635782570693254e-05, + "loss": 0.9506, + "step": 1803 + }, + { + "epoch": 0.3, + "learning_rate": 1.6353633799557153e-05, + "loss": 0.9827, + "step": 1804 + }, + { + "epoch": 0.3, + "learning_rate": 1.6349440019064126e-05, + "loss": 0.3667, + "step": 1805 + }, + { + "epoch": 0.3, + "learning_rate": 1.6345244366689813e-05, + "loss": 0.979, + "step": 1806 + }, + { + "epoch": 0.3, + "learning_rate": 1.6341046843671145e-05, + "loss": 0.9373, + "step": 1807 + }, + { + "epoch": 0.3, + "learning_rate": 1.6336847451245592e-05, + "loss": 0.9483, + "step": 1808 + }, + { + "epoch": 0.3, + "learning_rate": 1.633264619065118e-05, + "loss": 0.9368, + "step": 1809 + }, + { + "epoch": 0.3, + "learning_rate": 1.6328443063126488e-05, + "loss": 0.9613, + "step": 1810 + }, + { + "epoch": 0.3, + "learning_rate": 1.6324238069910633e-05, + "loss": 0.9062, + "step": 1811 + }, + { + "epoch": 0.3, + "learning_rate": 1.63200312122433e-05, + "loss": 0.9418, + "step": 1812 + }, + { + "epoch": 0.3, + "learning_rate": 1.6315822491364708e-05, + "loss": 0.8971, + "step": 1813 + }, + { + "epoch": 0.3, + "learning_rate": 1.6311611908515635e-05, + "loss": 0.9685, + "step": 1814 + }, + { + "epoch": 0.3, + "learning_rate": 1.6307399464937404e-05, + "loss": 0.9008, + "step": 1815 + }, + { + "epoch": 0.3, + "learning_rate": 1.6303185161871895e-05, + "loss": 0.9469, + "step": 1816 + }, + { + "epoch": 0.3, + "learning_rate": 1.6298969000561515e-05, + "loss": 0.9318, + "step": 1817 + }, + { + "epoch": 0.3, + "learning_rate": 1.629475098224924e-05, + "loss": 0.961, + "step": 1818 + }, + { + "epoch": 0.3, + "learning_rate": 1.6290531108178587e-05, + "loss": 1.0046, + "step": 1819 + }, + { + "epoch": 0.31, + "learning_rate": 1.6286309379593616e-05, + "loss": 
0.9328, + "step": 1820 + }, + { + "epoch": 0.31, + "learning_rate": 1.6282085797738937e-05, + "loss": 0.9382, + "step": 1821 + }, + { + "epoch": 0.31, + "learning_rate": 1.627786036385971e-05, + "loss": 0.9779, + "step": 1822 + }, + { + "epoch": 0.31, + "learning_rate": 1.6273633079201626e-05, + "loss": 0.9118, + "step": 1823 + }, + { + "epoch": 0.31, + "learning_rate": 1.6269403945010948e-05, + "loss": 1.0058, + "step": 1824 + }, + { + "epoch": 0.31, + "learning_rate": 1.626517296253446e-05, + "loss": 0.991, + "step": 1825 + }, + { + "epoch": 0.31, + "learning_rate": 1.62609401330195e-05, + "loss": 0.9844, + "step": 1826 + }, + { + "epoch": 0.31, + "learning_rate": 1.6256705457713952e-05, + "loss": 0.9602, + "step": 1827 + }, + { + "epoch": 0.31, + "learning_rate": 1.6252468937866243e-05, + "loss": 0.3697, + "step": 1828 + }, + { + "epoch": 0.31, + "learning_rate": 1.624823057472534e-05, + "loss": 0.9498, + "step": 1829 + }, + { + "epoch": 0.31, + "learning_rate": 1.6243990369540764e-05, + "loss": 1.0282, + "step": 1830 + }, + { + "epoch": 0.31, + "learning_rate": 1.6239748323562562e-05, + "loss": 0.9485, + "step": 1831 + }, + { + "epoch": 0.31, + "learning_rate": 1.6235504438041342e-05, + "loss": 0.9553, + "step": 1832 + }, + { + "epoch": 0.31, + "learning_rate": 1.623125871422824e-05, + "loss": 0.9321, + "step": 1833 + }, + { + "epoch": 0.31, + "learning_rate": 1.6227011153374945e-05, + "loss": 0.9608, + "step": 1834 + }, + { + "epoch": 0.31, + "learning_rate": 1.622276175673368e-05, + "loss": 1.0294, + "step": 1835 + }, + { + "epoch": 0.31, + "learning_rate": 1.6218510525557206e-05, + "loss": 0.9577, + "step": 1836 + }, + { + "epoch": 0.31, + "learning_rate": 1.621425746109884e-05, + "loss": 0.9702, + "step": 1837 + }, + { + "epoch": 0.31, + "learning_rate": 1.6210002564612425e-05, + "loss": 0.9359, + "step": 1838 + }, + { + "epoch": 0.31, + "learning_rate": 1.6205745837352346e-05, + "loss": 0.9087, + "step": 1839 + }, + { + "epoch": 0.31, + "learning_rate": 1.6201487280573536e-05, + "loss": 0.8731, + "step": 1840 + }, + { + "epoch": 0.31, + "learning_rate": 1.619722689553146e-05, + "loss": 0.3885, + "step": 1841 + }, + { + "epoch": 0.31, + "learning_rate": 1.6192964683482127e-05, + "loss": 0.9777, + "step": 1842 + }, + { + "epoch": 0.31, + "learning_rate": 1.6188700645682075e-05, + "loss": 0.8994, + "step": 1843 + }, + { + "epoch": 0.31, + "learning_rate": 1.618443478338839e-05, + "loss": 0.9851, + "step": 1844 + }, + { + "epoch": 0.31, + "learning_rate": 1.6180167097858697e-05, + "loss": 0.982, + "step": 1845 + }, + { + "epoch": 0.31, + "learning_rate": 1.6175897590351146e-05, + "loss": 0.3823, + "step": 1846 + }, + { + "epoch": 0.31, + "learning_rate": 1.617162626212444e-05, + "loss": 0.9506, + "step": 1847 + }, + { + "epoch": 0.31, + "learning_rate": 1.616735311443781e-05, + "loss": 0.9131, + "step": 1848 + }, + { + "epoch": 0.31, + "learning_rate": 1.616307814855102e-05, + "loss": 0.9313, + "step": 1849 + }, + { + "epoch": 0.31, + "learning_rate": 1.6158801365724376e-05, + "loss": 0.944, + "step": 1850 + }, + { + "epoch": 0.31, + "learning_rate": 1.6154522767218726e-05, + "loss": 0.9729, + "step": 1851 + }, + { + "epoch": 0.31, + "learning_rate": 1.6150242354295435e-05, + "loss": 0.9464, + "step": 1852 + }, + { + "epoch": 0.31, + "learning_rate": 1.614596012821642e-05, + "loss": 0.9784, + "step": 1853 + }, + { + "epoch": 0.31, + "learning_rate": 1.614167609024412e-05, + "loss": 0.977, + "step": 1854 + }, + { + "epoch": 0.31, + "learning_rate": 1.613739024164152e-05, + "loss": 
0.9471, + "step": 1855 + }, + { + "epoch": 0.31, + "learning_rate": 1.613310258367213e-05, + "loss": 0.9577, + "step": 1856 + }, + { + "epoch": 0.31, + "learning_rate": 1.61288131176e-05, + "loss": 0.9028, + "step": 1857 + }, + { + "epoch": 0.31, + "learning_rate": 1.6124521844689707e-05, + "loss": 0.9266, + "step": 1858 + }, + { + "epoch": 0.31, + "learning_rate": 1.612022876620636e-05, + "loss": 0.9107, + "step": 1859 + }, + { + "epoch": 0.31, + "learning_rate": 1.6115933883415607e-05, + "loss": 0.373, + "step": 1860 + }, + { + "epoch": 0.31, + "learning_rate": 1.6111637197583623e-05, + "loss": 0.9469, + "step": 1861 + }, + { + "epoch": 0.31, + "learning_rate": 1.610733870997712e-05, + "loss": 0.9323, + "step": 1862 + }, + { + "epoch": 0.31, + "learning_rate": 1.6103038421863332e-05, + "loss": 0.9121, + "step": 1863 + }, + { + "epoch": 0.31, + "learning_rate": 1.609873633451003e-05, + "loss": 0.9606, + "step": 1864 + }, + { + "epoch": 0.31, + "learning_rate": 1.6094432449185513e-05, + "loss": 0.9333, + "step": 1865 + }, + { + "epoch": 0.31, + "learning_rate": 1.6090126767158616e-05, + "loss": 0.9661, + "step": 1866 + }, + { + "epoch": 0.31, + "learning_rate": 1.6085819289698695e-05, + "loss": 0.9359, + "step": 1867 + }, + { + "epoch": 0.31, + "learning_rate": 1.6081510018075638e-05, + "loss": 0.9392, + "step": 1868 + }, + { + "epoch": 0.31, + "learning_rate": 1.607719895355987e-05, + "loss": 0.9575, + "step": 1869 + }, + { + "epoch": 0.31, + "learning_rate": 1.6072886097422333e-05, + "loss": 0.8808, + "step": 1870 + }, + { + "epoch": 0.31, + "learning_rate": 1.60685714509345e-05, + "loss": 0.9777, + "step": 1871 + }, + { + "epoch": 0.31, + "learning_rate": 1.606425501536838e-05, + "loss": 0.8886, + "step": 1872 + }, + { + "epoch": 0.31, + "learning_rate": 1.60599367919965e-05, + "loss": 0.8994, + "step": 1873 + }, + { + "epoch": 0.31, + "learning_rate": 1.6055616782091917e-05, + "loss": 1.0152, + "step": 1874 + }, + { + "epoch": 0.31, + "learning_rate": 1.6051294986928218e-05, + "loss": 1.0019, + "step": 1875 + }, + { + "epoch": 0.31, + "learning_rate": 1.6046971407779507e-05, + "loss": 0.9147, + "step": 1876 + }, + { + "epoch": 0.31, + "learning_rate": 1.604264604592042e-05, + "loss": 0.9711, + "step": 1877 + }, + { + "epoch": 0.31, + "learning_rate": 1.603831890262613e-05, + "loss": 0.973, + "step": 1878 + }, + { + "epoch": 0.32, + "learning_rate": 1.603398997917232e-05, + "loss": 0.9246, + "step": 1879 + }, + { + "epoch": 0.32, + "learning_rate": 1.6029659276835193e-05, + "loss": 0.3349, + "step": 1880 + }, + { + "epoch": 0.32, + "learning_rate": 1.6025326796891492e-05, + "loss": 0.9605, + "step": 1881 + }, + { + "epoch": 0.32, + "learning_rate": 1.6020992540618476e-05, + "loss": 0.9661, + "step": 1882 + }, + { + "epoch": 0.32, + "learning_rate": 1.6016656509293933e-05, + "loss": 0.3746, + "step": 1883 + }, + { + "epoch": 0.32, + "learning_rate": 1.6012318704196164e-05, + "loss": 0.9773, + "step": 1884 + }, + { + "epoch": 0.32, + "learning_rate": 1.6007979126604005e-05, + "loss": 0.9175, + "step": 1885 + }, + { + "epoch": 0.32, + "learning_rate": 1.6003637777796804e-05, + "loss": 1.018, + "step": 1886 + }, + { + "epoch": 0.32, + "learning_rate": 1.599929465905444e-05, + "loss": 0.9275, + "step": 1887 + }, + { + "epoch": 0.32, + "learning_rate": 1.5994949771657307e-05, + "loss": 0.9665, + "step": 1888 + }, + { + "epoch": 0.32, + "learning_rate": 1.5990603116886326e-05, + "loss": 0.9449, + "step": 1889 + }, + { + "epoch": 0.32, + "learning_rate": 1.5986254696022935e-05, + "loss": 
0.9705, + "step": 1890 + }, + { + "epoch": 0.32, + "learning_rate": 1.5981904510349092e-05, + "loss": 0.9679, + "step": 1891 + }, + { + "epoch": 0.32, + "learning_rate": 1.5977552561147276e-05, + "loss": 0.929, + "step": 1892 + }, + { + "epoch": 0.32, + "learning_rate": 1.5973198849700494e-05, + "loss": 0.9506, + "step": 1893 + }, + { + "epoch": 0.32, + "learning_rate": 1.596884337729226e-05, + "loss": 0.9344, + "step": 1894 + }, + { + "epoch": 0.32, + "learning_rate": 1.5964486145206612e-05, + "loss": 0.9805, + "step": 1895 + }, + { + "epoch": 0.32, + "learning_rate": 1.5960127154728112e-05, + "loss": 0.909, + "step": 1896 + }, + { + "epoch": 0.32, + "learning_rate": 1.5955766407141834e-05, + "loss": 0.9314, + "step": 1897 + }, + { + "epoch": 0.32, + "learning_rate": 1.5951403903733373e-05, + "loss": 0.8881, + "step": 1898 + }, + { + "epoch": 0.32, + "learning_rate": 1.5947039645788834e-05, + "loss": 0.9637, + "step": 1899 + }, + { + "epoch": 0.32, + "learning_rate": 1.5942673634594853e-05, + "loss": 0.9957, + "step": 1900 + }, + { + "epoch": 0.32, + "learning_rate": 1.11731843575419e-07, + "loss": 0.9556, + "step": 1901 + }, + { + "epoch": 0.32, + "learning_rate": 2.23463687150838e-07, + "loss": 1.0238, + "step": 1902 + }, + { + "epoch": 0.32, + "learning_rate": 3.3519553072625703e-07, + "loss": 0.9935, + "step": 1903 + }, + { + "epoch": 0.32, + "learning_rate": 4.46927374301676e-07, + "loss": 0.9386, + "step": 1904 + }, + { + "epoch": 0.32, + "learning_rate": 5.58659217877095e-07, + "loss": 0.9827, + "step": 1905 + }, + { + "epoch": 0.32, + "learning_rate": 6.703910614525141e-07, + "loss": 1.0096, + "step": 1906 + }, + { + "epoch": 0.32, + "learning_rate": 7.82122905027933e-07, + "loss": 0.9289, + "step": 1907 + }, + { + "epoch": 0.32, + "learning_rate": 8.93854748603352e-07, + "loss": 0.8968, + "step": 1908 + }, + { + "epoch": 0.32, + "learning_rate": 1.005586592178771e-06, + "loss": 1.0083, + "step": 1909 + }, + { + "epoch": 0.32, + "learning_rate": 1.11731843575419e-06, + "loss": 0.9223, + "step": 1910 + }, + { + "epoch": 0.32, + "learning_rate": 1.229050279329609e-06, + "loss": 0.9253, + "step": 1911 + }, + { + "epoch": 0.32, + "learning_rate": 1.3407821229050281e-06, + "loss": 0.9412, + "step": 1912 + }, + { + "epoch": 0.32, + "learning_rate": 1.4525139664804472e-06, + "loss": 0.9574, + "step": 1913 + }, + { + "epoch": 0.32, + "learning_rate": 1.564245810055866e-06, + "loss": 0.8988, + "step": 1914 + }, + { + "epoch": 0.32, + "learning_rate": 1.675977653631285e-06, + "loss": 0.9168, + "step": 1915 + }, + { + "epoch": 0.32, + "learning_rate": 1.787709497206704e-06, + "loss": 0.9858, + "step": 1916 + }, + { + "epoch": 0.32, + "learning_rate": 1.899441340782123e-06, + "loss": 0.866, + "step": 1917 + }, + { + "epoch": 0.32, + "learning_rate": 2.011173184357542e-06, + "loss": 1.0105, + "step": 1918 + }, + { + "epoch": 0.32, + "learning_rate": 2.1229050279329612e-06, + "loss": 0.9, + "step": 1919 + }, + { + "epoch": 0.32, + "learning_rate": 2.23463687150838e-06, + "loss": 0.9947, + "step": 1920 + }, + { + "epoch": 0.32, + "learning_rate": 2.3463687150837993e-06, + "loss": 0.9399, + "step": 1921 + }, + { + "epoch": 0.32, + "learning_rate": 2.458100558659218e-06, + "loss": 0.9611, + "step": 1922 + }, + { + "epoch": 0.32, + "learning_rate": 2.569832402234637e-06, + "loss": 0.9343, + "step": 1923 + }, + { + "epoch": 0.32, + "learning_rate": 2.6815642458100562e-06, + "loss": 0.9352, + "step": 1924 + }, + { + "epoch": 0.32, + "learning_rate": 2.793296089385475e-06, + "loss": 0.9538, + "step": 
1925 + }, + { + "epoch": 0.32, + "learning_rate": 2.9050279329608943e-06, + "loss": 0.3353, + "step": 1926 + }, + { + "epoch": 0.32, + "learning_rate": 3.016759776536313e-06, + "loss": 0.9126, + "step": 1927 + }, + { + "epoch": 0.32, + "learning_rate": 3.128491620111732e-06, + "loss": 0.8986, + "step": 1928 + }, + { + "epoch": 0.32, + "learning_rate": 3.240223463687151e-06, + "loss": 0.9611, + "step": 1929 + }, + { + "epoch": 0.32, + "learning_rate": 3.35195530726257e-06, + "loss": 0.3628, + "step": 1930 + }, + { + "epoch": 0.32, + "learning_rate": 3.4636871508379893e-06, + "loss": 0.9265, + "step": 1931 + }, + { + "epoch": 0.32, + "learning_rate": 3.575418994413408e-06, + "loss": 0.9461, + "step": 1932 + }, + { + "epoch": 0.32, + "learning_rate": 3.687150837988827e-06, + "loss": 0.9313, + "step": 1933 + }, + { + "epoch": 0.32, + "learning_rate": 3.798882681564246e-06, + "loss": 0.9649, + "step": 1934 + }, + { + "epoch": 0.32, + "learning_rate": 3.910614525139665e-06, + "loss": 0.9071, + "step": 1935 + }, + { + "epoch": 0.32, + "learning_rate": 4.022346368715084e-06, + "loss": 0.9716, + "step": 1936 + }, + { + "epoch": 0.32, + "learning_rate": 4.134078212290504e-06, + "loss": 0.9316, + "step": 1937 + }, + { + "epoch": 0.32, + "learning_rate": 4.2458100558659224e-06, + "loss": 0.8935, + "step": 1938 + }, + { + "epoch": 0.33, + "learning_rate": 4.357541899441341e-06, + "loss": 0.9033, + "step": 1939 + }, + { + "epoch": 0.33, + "learning_rate": 4.46927374301676e-06, + "loss": 0.8751, + "step": 1940 + }, + { + "epoch": 0.33, + "learning_rate": 4.581005586592179e-06, + "loss": 0.9688, + "step": 1941 + }, + { + "epoch": 0.33, + "learning_rate": 4.692737430167599e-06, + "loss": 0.9184, + "step": 1942 + }, + { + "epoch": 0.33, + "learning_rate": 4.8044692737430175e-06, + "loss": 1.0064, + "step": 1943 + }, + { + "epoch": 0.33, + "learning_rate": 4.916201117318436e-06, + "loss": 0.9539, + "step": 1944 + }, + { + "epoch": 0.33, + "learning_rate": 5.027932960893855e-06, + "loss": 0.9686, + "step": 1945 + }, + { + "epoch": 0.33, + "learning_rate": 5.139664804469274e-06, + "loss": 0.9591, + "step": 1946 + }, + { + "epoch": 0.33, + "learning_rate": 5.251396648044693e-06, + "loss": 0.9842, + "step": 1947 + }, + { + "epoch": 0.33, + "learning_rate": 5.3631284916201125e-06, + "loss": 0.9152, + "step": 1948 + }, + { + "epoch": 0.33, + "learning_rate": 5.474860335195531e-06, + "loss": 0.9581, + "step": 1949 + }, + { + "epoch": 0.33, + "learning_rate": 5.58659217877095e-06, + "loss": 0.9359, + "step": 1950 + }, + { + "epoch": 0.33, + "learning_rate": 5.698324022346369e-06, + "loss": 0.9513, + "step": 1951 + }, + { + "epoch": 0.33, + "learning_rate": 5.810055865921789e-06, + "loss": 0.939, + "step": 1952 + }, + { + "epoch": 0.33, + "learning_rate": 5.9217877094972075e-06, + "loss": 0.934, + "step": 1953 + }, + { + "epoch": 0.33, + "learning_rate": 6.033519553072626e-06, + "loss": 1.0162, + "step": 1954 + }, + { + "epoch": 0.33, + "learning_rate": 6.145251396648045e-06, + "loss": 1.0404, + "step": 1955 + }, + { + "epoch": 0.33, + "learning_rate": 6.256983240223464e-06, + "loss": 0.9122, + "step": 1956 + }, + { + "epoch": 0.33, + "learning_rate": 6.368715083798883e-06, + "loss": 0.9522, + "step": 1957 + }, + { + "epoch": 0.33, + "learning_rate": 6.480446927374302e-06, + "loss": 0.9622, + "step": 1958 + }, + { + "epoch": 0.33, + "learning_rate": 6.592178770949721e-06, + "loss": 0.935, + "step": 1959 + }, + { + "epoch": 0.33, + "learning_rate": 6.70391061452514e-06, + "loss": 0.9248, + "step": 1960 + }, + { + 
"epoch": 0.33, + "learning_rate": 6.815642458100559e-06, + "loss": 0.8945, + "step": 1961 + }, + { + "epoch": 0.33, + "learning_rate": 6.927374301675979e-06, + "loss": 0.971, + "step": 1962 + }, + { + "epoch": 0.33, + "learning_rate": 7.0391061452513975e-06, + "loss": 0.9063, + "step": 1963 + }, + { + "epoch": 0.33, + "learning_rate": 7.150837988826816e-06, + "loss": 0.8926, + "step": 1964 + }, + { + "epoch": 0.33, + "learning_rate": 7.262569832402235e-06, + "loss": 0.9231, + "step": 1965 + }, + { + "epoch": 0.33, + "learning_rate": 7.374301675977654e-06, + "loss": 0.8559, + "step": 1966 + }, + { + "epoch": 0.33, + "learning_rate": 7.486033519553073e-06, + "loss": 0.8943, + "step": 1967 + }, + { + "epoch": 0.33, + "learning_rate": 7.597765363128492e-06, + "loss": 0.9218, + "step": 1968 + }, + { + "epoch": 0.33, + "learning_rate": 7.709497206703911e-06, + "loss": 0.9532, + "step": 1969 + }, + { + "epoch": 0.33, + "learning_rate": 7.82122905027933e-06, + "loss": 0.3498, + "step": 1970 + }, + { + "epoch": 0.33, + "learning_rate": 7.932960893854749e-06, + "loss": 0.942, + "step": 1971 + }, + { + "epoch": 0.33, + "learning_rate": 8.044692737430168e-06, + "loss": 0.9088, + "step": 1972 + }, + { + "epoch": 0.33, + "learning_rate": 8.156424581005588e-06, + "loss": 0.9391, + "step": 1973 + }, + { + "epoch": 0.33, + "learning_rate": 8.268156424581007e-06, + "loss": 0.8914, + "step": 1974 + }, + { + "epoch": 0.33, + "learning_rate": 8.379888268156426e-06, + "loss": 0.9405, + "step": 1975 + }, + { + "epoch": 0.33, + "learning_rate": 8.491620111731845e-06, + "loss": 0.96, + "step": 1976 + }, + { + "epoch": 0.33, + "learning_rate": 8.603351955307264e-06, + "loss": 0.9487, + "step": 1977 + }, + { + "epoch": 0.33, + "learning_rate": 8.715083798882683e-06, + "loss": 0.931, + "step": 1978 + }, + { + "epoch": 0.33, + "learning_rate": 8.826815642458101e-06, + "loss": 0.9529, + "step": 1979 + }, + { + "epoch": 0.33, + "learning_rate": 8.93854748603352e-06, + "loss": 0.9495, + "step": 1980 + }, + { + "epoch": 0.33, + "learning_rate": 9.050279329608939e-06, + "loss": 0.8757, + "step": 1981 + }, + { + "epoch": 0.33, + "learning_rate": 9.162011173184358e-06, + "loss": 0.9278, + "step": 1982 + }, + { + "epoch": 0.33, + "learning_rate": 9.273743016759777e-06, + "loss": 0.9292, + "step": 1983 + }, + { + "epoch": 0.33, + "learning_rate": 9.385474860335197e-06, + "loss": 0.8547, + "step": 1984 + }, + { + "epoch": 0.33, + "learning_rate": 9.497206703910616e-06, + "loss": 0.8732, + "step": 1985 + }, + { + "epoch": 0.33, + "learning_rate": 9.608938547486035e-06, + "loss": 0.9263, + "step": 1986 + }, + { + "epoch": 0.33, + "learning_rate": 9.720670391061454e-06, + "loss": 0.9383, + "step": 1987 + }, + { + "epoch": 0.33, + "learning_rate": 9.832402234636873e-06, + "loss": 0.8878, + "step": 1988 + }, + { + "epoch": 0.33, + "learning_rate": 9.944134078212291e-06, + "loss": 0.9788, + "step": 1989 + }, + { + "epoch": 0.33, + "learning_rate": 1.005586592178771e-05, + "loss": 0.9293, + "step": 1990 + }, + { + "epoch": 0.33, + "learning_rate": 1.0167597765363129e-05, + "loss": 0.9328, + "step": 1991 + }, + { + "epoch": 0.33, + "learning_rate": 1.0279329608938548e-05, + "loss": 0.3591, + "step": 1992 + }, + { + "epoch": 0.33, + "learning_rate": 1.0391061452513967e-05, + "loss": 0.9202, + "step": 1993 + }, + { + "epoch": 0.33, + "learning_rate": 1.0502793296089386e-05, + "loss": 0.9627, + "step": 1994 + }, + { + "epoch": 0.33, + "learning_rate": 1.0614525139664806e-05, + "loss": 0.9191, + "step": 1995 + }, + { + "epoch": 0.33, + 
"learning_rate": 1.0726256983240225e-05, + "loss": 0.8953, + "step": 1996 + }, + { + "epoch": 0.33, + "learning_rate": 1.0837988826815644e-05, + "loss": 0.9378, + "step": 1997 + }, + { + "epoch": 0.33, + "learning_rate": 1.0949720670391063e-05, + "loss": 0.933, + "step": 1998 + }, + { + "epoch": 0.34, + "learning_rate": 1.1061452513966481e-05, + "loss": 0.9743, + "step": 1999 + }, + { + "epoch": 0.34, + "learning_rate": 1.11731843575419e-05, + "loss": 0.907, + "step": 2000 + }, + { + "epoch": 0.34, + "learning_rate": 1.1284916201117319e-05, + "loss": 0.9195, + "step": 2001 + }, + { + "epoch": 0.34, + "learning_rate": 1.1396648044692738e-05, + "loss": 0.9565, + "step": 2002 + }, + { + "epoch": 0.34, + "learning_rate": 1.1508379888268157e-05, + "loss": 0.9067, + "step": 2003 + }, + { + "epoch": 0.34, + "learning_rate": 1.1620111731843577e-05, + "loss": 0.9423, + "step": 2004 + }, + { + "epoch": 0.34, + "learning_rate": 1.1731843575418994e-05, + "loss": 0.9455, + "step": 2005 + }, + { + "epoch": 0.34, + "learning_rate": 1.1843575418994415e-05, + "loss": 0.9402, + "step": 2006 + }, + { + "epoch": 0.34, + "learning_rate": 1.1955307262569834e-05, + "loss": 0.9257, + "step": 2007 + }, + { + "epoch": 0.34, + "learning_rate": 1.2067039106145253e-05, + "loss": 0.9588, + "step": 2008 + }, + { + "epoch": 0.34, + "learning_rate": 1.2178770949720671e-05, + "loss": 0.8826, + "step": 2009 + }, + { + "epoch": 0.34, + "learning_rate": 1.229050279329609e-05, + "loss": 0.885, + "step": 2010 + }, + { + "epoch": 0.34, + "learning_rate": 1.2402234636871509e-05, + "loss": 0.3655, + "step": 2011 + }, + { + "epoch": 0.34, + "learning_rate": 1.2513966480446928e-05, + "loss": 0.9204, + "step": 2012 + }, + { + "epoch": 0.34, + "learning_rate": 1.2625698324022347e-05, + "loss": 0.9252, + "step": 2013 + }, + { + "epoch": 0.34, + "learning_rate": 1.2737430167597766e-05, + "loss": 1.0406, + "step": 2014 + }, + { + "epoch": 0.34, + "learning_rate": 1.2849162011173186e-05, + "loss": 0.9092, + "step": 2015 + }, + { + "epoch": 0.34, + "learning_rate": 1.2960893854748603e-05, + "loss": 0.9631, + "step": 2016 + }, + { + "epoch": 0.34, + "learning_rate": 1.3072625698324024e-05, + "loss": 0.9558, + "step": 2017 + }, + { + "epoch": 0.34, + "learning_rate": 1.3184357541899443e-05, + "loss": 0.9012, + "step": 2018 + }, + { + "epoch": 0.34, + "learning_rate": 1.3296089385474861e-05, + "loss": 0.8769, + "step": 2019 + }, + { + "epoch": 0.34, + "learning_rate": 1.340782122905028e-05, + "loss": 0.8885, + "step": 2020 + }, + { + "epoch": 0.34, + "learning_rate": 1.3519553072625699e-05, + "loss": 0.343, + "step": 2021 + }, + { + "epoch": 0.34, + "learning_rate": 1.3631284916201118e-05, + "loss": 0.9886, + "step": 2022 + }, + { + "epoch": 0.34, + "learning_rate": 1.3743016759776537e-05, + "loss": 0.9512, + "step": 2023 + }, + { + "epoch": 0.34, + "learning_rate": 1.3854748603351957e-05, + "loss": 0.9012, + "step": 2024 + }, + { + "epoch": 0.34, + "learning_rate": 1.3966480446927374e-05, + "loss": 0.9985, + "step": 2025 + }, + { + "epoch": 0.34, + "learning_rate": 1.4078212290502795e-05, + "loss": 0.9283, + "step": 2026 + }, + { + "epoch": 0.34, + "learning_rate": 1.4189944134078212e-05, + "loss": 0.9153, + "step": 2027 + }, + { + "epoch": 0.34, + "learning_rate": 1.4301675977653633e-05, + "loss": 0.9614, + "step": 2028 + }, + { + "epoch": 0.34, + "learning_rate": 1.4413407821229052e-05, + "loss": 0.9418, + "step": 2029 + }, + { + "epoch": 0.34, + "learning_rate": 1.452513966480447e-05, + "loss": 0.9835, + "step": 2030 + }, + { + "epoch": 
0.34, + "learning_rate": 1.463687150837989e-05, + "loss": 0.9929, + "step": 2031 + }, + { + "epoch": 0.34, + "learning_rate": 1.4748603351955308e-05, + "loss": 0.9346, + "step": 2032 + }, + { + "epoch": 0.34, + "learning_rate": 1.4860335195530729e-05, + "loss": 0.9297, + "step": 2033 + }, + { + "epoch": 0.34, + "learning_rate": 1.4972067039106146e-05, + "loss": 0.9329, + "step": 2034 + }, + { + "epoch": 0.34, + "learning_rate": 1.5083798882681566e-05, + "loss": 0.8894, + "step": 2035 + }, + { + "epoch": 0.34, + "learning_rate": 1.5195530726256983e-05, + "loss": 0.9348, + "step": 2036 + }, + { + "epoch": 0.34, + "learning_rate": 1.5307262569832404e-05, + "loss": 0.9124, + "step": 2037 + }, + { + "epoch": 0.34, + "learning_rate": 1.5418994413407823e-05, + "loss": 0.9486, + "step": 2038 + }, + { + "epoch": 0.34, + "learning_rate": 1.553072625698324e-05, + "loss": 1.0199, + "step": 2039 + }, + { + "epoch": 0.34, + "learning_rate": 1.564245810055866e-05, + "loss": 0.9374, + "step": 2040 + }, + { + "epoch": 0.34, + "learning_rate": 1.575418994413408e-05, + "loss": 0.9037, + "step": 2041 + }, + { + "epoch": 0.34, + "learning_rate": 1.5865921787709498e-05, + "loss": 0.9518, + "step": 2042 + }, + { + "epoch": 0.34, + "learning_rate": 1.5977653631284917e-05, + "loss": 0.9505, + "step": 2043 + }, + { + "epoch": 0.34, + "learning_rate": 1.6089385474860336e-05, + "loss": 0.985, + "step": 2044 + }, + { + "epoch": 0.34, + "learning_rate": 1.6201117318435755e-05, + "loss": 0.9782, + "step": 2045 + }, + { + "epoch": 0.34, + "learning_rate": 1.6312849162011177e-05, + "loss": 0.9623, + "step": 2046 + }, + { + "epoch": 0.34, + "learning_rate": 1.6424581005586592e-05, + "loss": 0.9975, + "step": 2047 + }, + { + "epoch": 0.34, + "learning_rate": 1.6536312849162014e-05, + "loss": 0.9069, + "step": 2048 + }, + { + "epoch": 0.34, + "learning_rate": 1.664804469273743e-05, + "loss": 0.9699, + "step": 2049 + }, + { + "epoch": 0.34, + "learning_rate": 1.6759776536312852e-05, + "loss": 0.8923, + "step": 2050 + }, + { + "epoch": 0.34, + "learning_rate": 1.687150837988827e-05, + "loss": 0.9225, + "step": 2051 + }, + { + "epoch": 0.34, + "learning_rate": 1.698324022346369e-05, + "loss": 0.8729, + "step": 2052 + }, + { + "epoch": 0.34, + "learning_rate": 1.709497206703911e-05, + "loss": 0.9227, + "step": 2053 + }, + { + "epoch": 0.34, + "learning_rate": 1.7206703910614527e-05, + "loss": 0.8861, + "step": 2054 + }, + { + "epoch": 0.34, + "learning_rate": 1.7318435754189946e-05, + "loss": 0.9594, + "step": 2055 + }, + { + "epoch": 0.34, + "learning_rate": 1.7430167597765365e-05, + "loss": 0.3578, + "step": 2056 + }, + { + "epoch": 0.34, + "learning_rate": 1.7541899441340784e-05, + "loss": 0.8833, + "step": 2057 + }, + { + "epoch": 0.35, + "learning_rate": 1.7653631284916203e-05, + "loss": 0.9323, + "step": 2058 + }, + { + "epoch": 0.35, + "learning_rate": 1.776536312849162e-05, + "loss": 0.9532, + "step": 2059 + }, + { + "epoch": 0.35, + "learning_rate": 1.787709497206704e-05, + "loss": 0.3462, + "step": 2060 + }, + { + "epoch": 0.35, + "learning_rate": 1.798882681564246e-05, + "loss": 0.8868, + "step": 2061 + }, + { + "epoch": 0.35, + "learning_rate": 1.8100558659217878e-05, + "loss": 0.9124, + "step": 2062 + }, + { + "epoch": 0.35, + "learning_rate": 1.8212290502793297e-05, + "loss": 0.9805, + "step": 2063 + }, + { + "epoch": 0.35, + "learning_rate": 1.8324022346368716e-05, + "loss": 0.8954, + "step": 2064 + }, + { + "epoch": 0.35, + "learning_rate": 1.8435754189944135e-05, + "loss": 0.982, + "step": 2065 + }, + { + 
"epoch": 0.35, + "learning_rate": 1.8547486033519553e-05, + "loss": 0.8863, + "step": 2066 + }, + { + "epoch": 0.35, + "learning_rate": 1.8659217877094972e-05, + "loss": 1.008, + "step": 2067 + }, + { + "epoch": 0.35, + "learning_rate": 1.8770949720670394e-05, + "loss": 0.9123, + "step": 2068 + }, + { + "epoch": 0.35, + "learning_rate": 1.888268156424581e-05, + "loss": 0.8969, + "step": 2069 + }, + { + "epoch": 0.35, + "learning_rate": 1.8994413407821232e-05, + "loss": 0.9238, + "step": 2070 + }, + { + "epoch": 0.35, + "learning_rate": 1.910614525139665e-05, + "loss": 0.8787, + "step": 2071 + }, + { + "epoch": 0.35, + "learning_rate": 1.921787709497207e-05, + "loss": 0.9531, + "step": 2072 + }, + { + "epoch": 0.35, + "learning_rate": 1.932960893854749e-05, + "loss": 0.9578, + "step": 2073 + }, + { + "epoch": 0.35, + "learning_rate": 1.9441340782122907e-05, + "loss": 0.9244, + "step": 2074 + }, + { + "epoch": 0.35, + "learning_rate": 1.9553072625698326e-05, + "loss": 0.9653, + "step": 2075 + }, + { + "epoch": 0.35, + "learning_rate": 1.9664804469273745e-05, + "loss": 0.379, + "step": 2076 + }, + { + "epoch": 0.35, + "learning_rate": 1.9776536312849164e-05, + "loss": 0.8898, + "step": 2077 + }, + { + "epoch": 0.35, + "learning_rate": 1.9888268156424583e-05, + "loss": 0.9587, + "step": 2078 + }, + { + "epoch": 0.35, + "learning_rate": 2e-05, + "loss": 0.8987, + "step": 2079 + }, + { + "epoch": 0.35, + "learning_rate": 1.9999998525947744e-05, + "loss": 0.995, + "step": 2080 + }, + { + "epoch": 0.35, + "learning_rate": 1.9999994103791404e-05, + "loss": 0.9864, + "step": 2081 + }, + { + "epoch": 0.35, + "learning_rate": 1.9999986733532287e-05, + "loss": 0.8768, + "step": 2082 + }, + { + "epoch": 0.35, + "learning_rate": 1.999997641517256e-05, + "loss": 0.945, + "step": 2083 + }, + { + "epoch": 0.35, + "learning_rate": 1.9999963148715276e-05, + "loss": 0.9197, + "step": 2084 + }, + { + "epoch": 0.35, + "learning_rate": 1.9999946934164337e-05, + "loss": 0.9735, + "step": 2085 + }, + { + "epoch": 0.35, + "learning_rate": 1.9999927771524526e-05, + "loss": 0.9903, + "step": 2086 + }, + { + "epoch": 0.35, + "learning_rate": 1.999990566080149e-05, + "loss": 0.95, + "step": 2087 + }, + { + "epoch": 0.35, + "learning_rate": 1.9999880602001752e-05, + "loss": 0.9967, + "step": 2088 + }, + { + "epoch": 0.35, + "learning_rate": 1.9999852595132698e-05, + "loss": 0.9844, + "step": 2089 + }, + { + "epoch": 0.35, + "learning_rate": 1.9999821640202584e-05, + "loss": 0.961, + "step": 2090 + }, + { + "epoch": 0.35, + "learning_rate": 1.999978773722054e-05, + "loss": 0.9694, + "step": 2091 + }, + { + "epoch": 0.35, + "learning_rate": 1.999975088619655e-05, + "loss": 0.9558, + "step": 2092 + }, + { + "epoch": 0.35, + "learning_rate": 1.9999711087141484e-05, + "loss": 0.9955, + "step": 2093 + }, + { + "epoch": 0.35, + "learning_rate": 1.9999668340067083e-05, + "loss": 0.9372, + "step": 2094 + }, + { + "epoch": 0.35, + "learning_rate": 1.9999622644985937e-05, + "loss": 0.9977, + "step": 2095 + }, + { + "epoch": 0.35, + "learning_rate": 1.9999574001911526e-05, + "loss": 0.9133, + "step": 2096 + }, + { + "epoch": 0.35, + "learning_rate": 1.9999522410858187e-05, + "loss": 0.9336, + "step": 2097 + }, + { + "epoch": 0.35, + "learning_rate": 1.9999467871841126e-05, + "loss": 0.9781, + "step": 2098 + }, + { + "epoch": 0.35, + "learning_rate": 1.999941038487643e-05, + "loss": 0.9462, + "step": 2099 + }, + { + "epoch": 0.35, + "learning_rate": 1.999934994998104e-05, + "loss": 0.8491, + "step": 2100 + }, + { + "epoch": 0.35, + 
"learning_rate": 1.999928656717278e-05, + "loss": 0.9412, + "step": 2101 + }, + { + "epoch": 0.35, + "learning_rate": 1.9999220236470326e-05, + "loss": 0.9719, + "step": 2102 + }, + { + "epoch": 0.35, + "learning_rate": 1.999915095789324e-05, + "loss": 0.9759, + "step": 2103 + }, + { + "epoch": 0.35, + "learning_rate": 1.9999078731461942e-05, + "loss": 0.951, + "step": 2104 + }, + { + "epoch": 0.35, + "learning_rate": 1.999900355719773e-05, + "loss": 0.9807, + "step": 2105 + }, + { + "epoch": 0.35, + "learning_rate": 1.9998925435122757e-05, + "loss": 0.9845, + "step": 2106 + }, + { + "epoch": 0.35, + "learning_rate": 1.9998844365260068e-05, + "loss": 0.9743, + "step": 2107 + }, + { + "epoch": 0.35, + "learning_rate": 1.9998760347633556e-05, + "loss": 1.013, + "step": 2108 + }, + { + "epoch": 0.35, + "learning_rate": 1.9998673382267983e-05, + "loss": 0.967, + "step": 2109 + }, + { + "epoch": 0.35, + "learning_rate": 1.9998583469189e-05, + "loss": 0.9648, + "step": 2110 + }, + { + "epoch": 0.35, + "learning_rate": 1.9998490608423106e-05, + "loss": 0.9696, + "step": 2111 + }, + { + "epoch": 0.35, + "learning_rate": 1.9998394799997684e-05, + "loss": 0.9583, + "step": 2112 + }, + { + "epoch": 0.35, + "learning_rate": 1.9998296043940973e-05, + "loss": 0.9451, + "step": 2113 + }, + { + "epoch": 0.35, + "learning_rate": 1.9998194340282088e-05, + "loss": 0.9213, + "step": 2114 + }, + { + "epoch": 0.35, + "learning_rate": 1.9998089689051014e-05, + "loss": 0.9282, + "step": 2115 + }, + { + "epoch": 0.35, + "learning_rate": 1.9997982090278606e-05, + "loss": 1.0565, + "step": 2116 + }, + { + "epoch": 0.35, + "learning_rate": 1.999787154399658e-05, + "loss": 0.9466, + "step": 2117 + }, + { + "epoch": 0.36, + "learning_rate": 1.9997758050237533e-05, + "loss": 0.988, + "step": 2118 + }, + { + "epoch": 0.36, + "learning_rate": 1.9997641609034916e-05, + "loss": 0.9047, + "step": 2119 + }, + { + "epoch": 0.36, + "learning_rate": 1.9997522220423062e-05, + "loss": 0.9399, + "step": 2120 + }, + { + "epoch": 0.36, + "learning_rate": 1.9997399884437167e-05, + "loss": 0.94, + "step": 2121 + }, + { + "epoch": 0.36, + "learning_rate": 1.9997274601113294e-05, + "loss": 0.9678, + "step": 2122 + }, + { + "epoch": 0.36, + "learning_rate": 1.9997146370488384e-05, + "loss": 0.9633, + "step": 2123 + }, + { + "epoch": 0.36, + "learning_rate": 1.9997015192600235e-05, + "loss": 0.9667, + "step": 2124 + }, + { + "epoch": 0.36, + "learning_rate": 1.9996881067487526e-05, + "loss": 0.964, + "step": 2125 + }, + { + "epoch": 0.36, + "learning_rate": 1.9996743995189793e-05, + "loss": 0.3857, + "step": 2126 + }, + { + "epoch": 0.36, + "learning_rate": 1.9996603975747443e-05, + "loss": 0.9595, + "step": 2127 + }, + { + "epoch": 0.36, + "learning_rate": 1.9996461009201767e-05, + "loss": 0.3784, + "step": 2128 + }, + { + "epoch": 0.36, + "learning_rate": 1.9996315095594903e-05, + "loss": 0.9302, + "step": 2129 + }, + { + "epoch": 0.36, + "learning_rate": 1.999616623496987e-05, + "loss": 0.3875, + "step": 2130 + }, + { + "epoch": 0.36, + "learning_rate": 1.9996014427370556e-05, + "loss": 0.9582, + "step": 2131 + }, + { + "epoch": 0.36, + "learning_rate": 1.9995859672841713e-05, + "loss": 1.0083, + "step": 2132 + }, + { + "epoch": 0.36, + "learning_rate": 1.9995701971428967e-05, + "loss": 0.9549, + "step": 2133 + }, + { + "epoch": 0.36, + "learning_rate": 1.9995541323178808e-05, + "loss": 0.9243, + "step": 2134 + }, + { + "epoch": 0.36, + "learning_rate": 1.9995377728138597e-05, + "loss": 0.9543, + "step": 2135 + }, + { + "epoch": 0.36, + 
"learning_rate": 1.9995211186356564e-05, + "loss": 0.9756, + "step": 2136 + }, + { + "epoch": 0.36, + "learning_rate": 1.9995041697881805e-05, + "loss": 0.9326, + "step": 2137 + }, + { + "epoch": 0.36, + "learning_rate": 1.999486926276429e-05, + "loss": 0.9087, + "step": 2138 + }, + { + "epoch": 0.36, + "learning_rate": 1.9994693881054853e-05, + "loss": 0.9643, + "step": 2139 + }, + { + "epoch": 0.36, + "learning_rate": 1.99945155528052e-05, + "loss": 1.0203, + "step": 2140 + }, + { + "epoch": 0.36, + "learning_rate": 1.9994334278067905e-05, + "loss": 1.0366, + "step": 2141 + }, + { + "epoch": 0.36, + "learning_rate": 1.9994150056896405e-05, + "loss": 0.9883, + "step": 2142 + }, + { + "epoch": 0.36, + "learning_rate": 1.9993962889345012e-05, + "loss": 0.971, + "step": 2143 + }, + { + "epoch": 0.36, + "learning_rate": 1.9993772775468907e-05, + "loss": 0.3625, + "step": 2144 + }, + { + "epoch": 0.36, + "learning_rate": 1.9993579715324135e-05, + "loss": 0.9937, + "step": 2145 + }, + { + "epoch": 0.36, + "learning_rate": 1.9993383708967618e-05, + "loss": 0.9292, + "step": 2146 + }, + { + "epoch": 0.36, + "learning_rate": 1.9993184756457132e-05, + "loss": 0.9348, + "step": 2147 + }, + { + "epoch": 0.36, + "learning_rate": 1.9992982857851334e-05, + "loss": 1.0105, + "step": 2148 + }, + { + "epoch": 0.36, + "learning_rate": 1.9992778013209752e-05, + "loss": 0.9786, + "step": 2149 + }, + { + "epoch": 0.36, + "learning_rate": 1.9992570222592768e-05, + "loss": 1.021, + "step": 2150 + }, + { + "epoch": 0.36, + "learning_rate": 1.999235948606164e-05, + "loss": 1.0123, + "step": 2151 + }, + { + "epoch": 0.36, + "learning_rate": 1.9992145803678505e-05, + "loss": 1.0262, + "step": 2152 + }, + { + "epoch": 0.36, + "learning_rate": 1.999192917550635e-05, + "loss": 0.959, + "step": 2153 + }, + { + "epoch": 0.36, + "learning_rate": 1.9991709601609042e-05, + "loss": 0.9799, + "step": 2154 + }, + { + "epoch": 0.36, + "learning_rate": 1.9991487082051314e-05, + "loss": 0.9666, + "step": 2155 + }, + { + "epoch": 0.36, + "learning_rate": 1.9991261616898767e-05, + "loss": 0.9313, + "step": 2156 + }, + { + "epoch": 0.36, + "learning_rate": 1.9991033206217868e-05, + "loss": 0.9498, + "step": 2157 + }, + { + "epoch": 0.36, + "learning_rate": 1.999080185007596e-05, + "loss": 0.9167, + "step": 2158 + }, + { + "epoch": 0.36, + "learning_rate": 1.9990567548541245e-05, + "loss": 0.3667, + "step": 2159 + }, + { + "epoch": 0.36, + "learning_rate": 1.9990330301682798e-05, + "loss": 0.9086, + "step": 2160 + }, + { + "epoch": 0.36, + "learning_rate": 1.9990090109570565e-05, + "loss": 0.9641, + "step": 2161 + }, + { + "epoch": 0.36, + "learning_rate": 1.9989846972275356e-05, + "loss": 0.9684, + "step": 2162 + }, + { + "epoch": 0.36, + "learning_rate": 1.9989600889868846e-05, + "loss": 0.9063, + "step": 2163 + }, + { + "epoch": 0.36, + "learning_rate": 1.9989351862423585e-05, + "loss": 0.9301, + "step": 2164 + }, + { + "epoch": 0.36, + "learning_rate": 1.9989099890012993e-05, + "loss": 0.9582, + "step": 2165 + }, + { + "epoch": 0.36, + "learning_rate": 1.998884497271135e-05, + "loss": 0.9469, + "step": 2166 + }, + { + "epoch": 0.36, + "learning_rate": 1.998858711059381e-05, + "loss": 0.9516, + "step": 2167 + }, + { + "epoch": 0.36, + "learning_rate": 1.998832630373639e-05, + "loss": 0.922, + "step": 2168 + }, + { + "epoch": 0.36, + "learning_rate": 1.9988062552215983e-05, + "loss": 0.8531, + "step": 2169 + }, + { + "epoch": 0.36, + "learning_rate": 1.9987795856110347e-05, + "loss": 0.9349, + "step": 2170 + }, + { + "epoch": 0.36, 
+ "learning_rate": 1.99875262154981e-05, + "loss": 0.9598, + "step": 2171 + }, + { + "epoch": 0.36, + "learning_rate": 1.9987253630458738e-05, + "loss": 0.9098, + "step": 2172 + }, + { + "epoch": 0.36, + "learning_rate": 1.9986978101072627e-05, + "loss": 0.9451, + "step": 2173 + }, + { + "epoch": 0.36, + "learning_rate": 1.9986699627420987e-05, + "loss": 0.982, + "step": 2174 + }, + { + "epoch": 0.36, + "learning_rate": 1.998641820958592e-05, + "loss": 0.9374, + "step": 2175 + }, + { + "epoch": 0.36, + "learning_rate": 1.9986133847650392e-05, + "loss": 0.9544, + "step": 2176 + }, + { + "epoch": 0.36, + "learning_rate": 1.9985846541698234e-05, + "loss": 0.9654, + "step": 2177 + }, + { + "epoch": 0.37, + "learning_rate": 1.9985556291814147e-05, + "loss": 0.9463, + "step": 2178 + }, + { + "epoch": 0.37, + "learning_rate": 1.99852630980837e-05, + "loss": 1.0037, + "step": 2179 + }, + { + "epoch": 0.37, + "learning_rate": 1.998496696059333e-05, + "loss": 0.9683, + "step": 2180 + }, + { + "epoch": 0.37, + "learning_rate": 1.9984667879430336e-05, + "loss": 0.9171, + "step": 2181 + }, + { + "epoch": 0.37, + "learning_rate": 1.99843658546829e-05, + "loss": 0.9104, + "step": 2182 + }, + { + "epoch": 0.37, + "learning_rate": 1.998406088644006e-05, + "loss": 0.9568, + "step": 2183 + }, + { + "epoch": 0.37, + "learning_rate": 1.9983752974791715e-05, + "loss": 0.9796, + "step": 2184 + }, + { + "epoch": 0.37, + "learning_rate": 1.9983442119828647e-05, + "loss": 0.9713, + "step": 2185 + }, + { + "epoch": 0.37, + "learning_rate": 1.99831283216425e-05, + "loss": 1.0112, + "step": 2186 + }, + { + "epoch": 0.37, + "learning_rate": 1.9982811580325784e-05, + "loss": 0.9947, + "step": 2187 + }, + { + "epoch": 0.37, + "learning_rate": 1.9982491895971878e-05, + "loss": 0.902, + "step": 2188 + }, + { + "epoch": 0.37, + "learning_rate": 1.9982169268675024e-05, + "loss": 0.9699, + "step": 2189 + }, + { + "epoch": 0.37, + "learning_rate": 1.9981843698530345e-05, + "loss": 1.0531, + "step": 2190 + }, + { + "epoch": 0.37, + "learning_rate": 1.9981515185633812e-05, + "loss": 0.9512, + "step": 2191 + }, + { + "epoch": 0.37, + "learning_rate": 1.9981183730082283e-05, + "loss": 0.9653, + "step": 2192 + }, + { + "epoch": 0.37, + "learning_rate": 1.9980849331973467e-05, + "loss": 0.9082, + "step": 2193 + }, + { + "epoch": 0.37, + "learning_rate": 1.9980511991405955e-05, + "loss": 0.8944, + "step": 2194 + }, + { + "epoch": 0.37, + "learning_rate": 1.9980171708479194e-05, + "loss": 0.9982, + "step": 2195 + }, + { + "epoch": 0.37, + "learning_rate": 1.9979828483293504e-05, + "loss": 0.9731, + "step": 2196 + }, + { + "epoch": 0.37, + "learning_rate": 1.997948231595007e-05, + "loss": 0.9695, + "step": 2197 + }, + { + "epoch": 0.37, + "learning_rate": 1.9979133206550953e-05, + "loss": 0.9372, + "step": 2198 + }, + { + "epoch": 0.37, + "learning_rate": 1.9978781155199062e-05, + "loss": 1.0128, + "step": 2199 + }, + { + "epoch": 0.37, + "learning_rate": 1.9978426161998195e-05, + "loss": 1.0321, + "step": 2200 + }, + { + "epoch": 0.37, + "learning_rate": 1.9978068227053002e-05, + "loss": 0.9499, + "step": 2201 + }, + { + "epoch": 0.37, + "learning_rate": 1.9977707350469012e-05, + "loss": 1.0056, + "step": 2202 + }, + { + "epoch": 0.37, + "learning_rate": 1.9977343532352608e-05, + "loss": 0.9772, + "step": 2203 + }, + { + "epoch": 0.37, + "learning_rate": 1.9976976772811055e-05, + "loss": 0.9922, + "step": 2204 + }, + { + "epoch": 0.37, + "learning_rate": 1.997660707195247e-05, + "loss": 0.9724, + "step": 2205 + }, + { + "epoch": 0.37, 
+ "learning_rate": 1.997623442988585e-05, + "loss": 0.9293, + "step": 2206 + }, + { + "epoch": 0.37, + "learning_rate": 1.9975858846721054e-05, + "loss": 0.883, + "step": 2207 + }, + { + "epoch": 0.37, + "learning_rate": 1.9975480322568802e-05, + "loss": 0.922, + "step": 2208 + }, + { + "epoch": 0.37, + "learning_rate": 1.9975098857540695e-05, + "loss": 0.9172, + "step": 2209 + }, + { + "epoch": 0.37, + "learning_rate": 1.9974714451749185e-05, + "loss": 0.9544, + "step": 2210 + }, + { + "epoch": 0.37, + "learning_rate": 1.9974327105307605e-05, + "loss": 0.9932, + "step": 2211 + }, + { + "epoch": 0.37, + "learning_rate": 1.9973936818330145e-05, + "loss": 0.9194, + "step": 2212 + }, + { + "epoch": 0.37, + "learning_rate": 1.9973543590931867e-05, + "loss": 0.9448, + "step": 2213 + }, + { + "epoch": 0.37, + "learning_rate": 1.9973147423228698e-05, + "loss": 0.9984, + "step": 2214 + }, + { + "epoch": 0.37, + "learning_rate": 1.9972748315337434e-05, + "loss": 0.9747, + "step": 2215 + }, + { + "epoch": 0.37, + "learning_rate": 1.9972346267375736e-05, + "loss": 0.971, + "step": 2216 + }, + { + "epoch": 0.37, + "learning_rate": 1.997194127946213e-05, + "loss": 0.9676, + "step": 2217 + }, + { + "epoch": 0.37, + "learning_rate": 1.9971533351716012e-05, + "loss": 0.9679, + "step": 2218 + }, + { + "epoch": 0.37, + "learning_rate": 1.9971122484257643e-05, + "loss": 0.9395, + "step": 2219 + }, + { + "epoch": 0.37, + "learning_rate": 1.9970708677208147e-05, + "loss": 0.9105, + "step": 2220 + }, + { + "epoch": 0.37, + "learning_rate": 1.9970291930689526e-05, + "loss": 0.9887, + "step": 2221 + }, + { + "epoch": 0.37, + "learning_rate": 1.9969872244824638e-05, + "loss": 0.959, + "step": 2222 + }, + { + "epoch": 0.37, + "learning_rate": 1.9969449619737212e-05, + "loss": 0.9753, + "step": 2223 + }, + { + "epoch": 0.37, + "learning_rate": 1.9969024055551843e-05, + "loss": 0.9526, + "step": 2224 + }, + { + "epoch": 0.37, + "learning_rate": 1.9968595552393983e-05, + "loss": 0.9303, + "step": 2225 + }, + { + "epoch": 0.37, + "learning_rate": 1.996816411038997e-05, + "loss": 0.9165, + "step": 2226 + }, + { + "epoch": 0.37, + "learning_rate": 1.9967729729666993e-05, + "loss": 1.0088, + "step": 2227 + }, + { + "epoch": 0.37, + "learning_rate": 1.9967292410353117e-05, + "loss": 0.9459, + "step": 2228 + }, + { + "epoch": 0.37, + "learning_rate": 1.996685215257726e-05, + "loss": 0.9475, + "step": 2229 + }, + { + "epoch": 0.37, + "learning_rate": 1.9966408956469215e-05, + "loss": 0.9664, + "step": 2230 + }, + { + "epoch": 0.37, + "learning_rate": 1.996596282215965e-05, + "loss": 0.9786, + "step": 2231 + }, + { + "epoch": 0.37, + "learning_rate": 1.996551374978008e-05, + "loss": 1.0069, + "step": 2232 + }, + { + "epoch": 0.37, + "learning_rate": 1.9965061739462903e-05, + "loss": 0.3851, + "step": 2233 + }, + { + "epoch": 0.37, + "learning_rate": 1.9964606791341373e-05, + "loss": 0.8833, + "step": 2234 + }, + { + "epoch": 0.37, + "learning_rate": 1.9964148905549617e-05, + "loss": 0.9302, + "step": 2235 + }, + { + "epoch": 0.37, + "learning_rate": 1.996368808222262e-05, + "loss": 0.9493, + "step": 2236 + }, + { + "epoch": 0.38, + "learning_rate": 1.996322432149624e-05, + "loss": 0.9756, + "step": 2237 + }, + { + "epoch": 0.38, + "learning_rate": 1.9962757623507197e-05, + "loss": 0.986, + "step": 2238 + }, + { + "epoch": 0.38, + "learning_rate": 1.996228798839308e-05, + "loss": 0.9261, + "step": 2239 + }, + { + "epoch": 0.38, + "learning_rate": 1.996181541629234e-05, + "loss": 0.9511, + "step": 2240 + }, + { + "epoch": 0.38, 
+ "learning_rate": 1.99613399073443e-05, + "loss": 0.3786, + "step": 2241 + }, + { + "epoch": 0.38, + "learning_rate": 1.9960861461689146e-05, + "loss": 0.9235, + "step": 2242 + }, + { + "epoch": 0.38, + "learning_rate": 1.996038007946792e-05, + "loss": 1.0645, + "step": 2243 + }, + { + "epoch": 0.38, + "learning_rate": 1.995989576082255e-05, + "loss": 0.9461, + "step": 2244 + }, + { + "epoch": 0.38, + "learning_rate": 1.9959408505895807e-05, + "loss": 0.9143, + "step": 2245 + }, + { + "epoch": 0.38, + "learning_rate": 1.9958918314831347e-05, + "loss": 0.913, + "step": 2246 + }, + { + "epoch": 0.38, + "learning_rate": 1.995842518777368e-05, + "loss": 0.9848, + "step": 2247 + }, + { + "epoch": 0.38, + "learning_rate": 1.9957929124868184e-05, + "loss": 0.8998, + "step": 2248 + }, + { + "epoch": 0.38, + "learning_rate": 1.995743012626111e-05, + "loss": 0.938, + "step": 2249 + }, + { + "epoch": 0.38, + "learning_rate": 1.995692819209956e-05, + "loss": 0.9419, + "step": 2250 + }, + { + "epoch": 0.38, + "learning_rate": 1.9956423322531515e-05, + "loss": 0.8603, + "step": 2251 + }, + { + "epoch": 0.38, + "learning_rate": 1.995591551770581e-05, + "loss": 0.9509, + "step": 2252 + }, + { + "epoch": 0.38, + "learning_rate": 1.9955404777772157e-05, + "loss": 0.9502, + "step": 2253 + }, + { + "epoch": 0.38, + "learning_rate": 1.9954891102881126e-05, + "loss": 0.9731, + "step": 2254 + }, + { + "epoch": 0.38, + "learning_rate": 1.9954374493184153e-05, + "loss": 0.3504, + "step": 2255 + }, + { + "epoch": 0.38, + "learning_rate": 1.9953854948833537e-05, + "loss": 0.9582, + "step": 2256 + }, + { + "epoch": 0.38, + "learning_rate": 1.9953332469982453e-05, + "loss": 1.0013, + "step": 2257 + }, + { + "epoch": 0.38, + "learning_rate": 1.9952807056784925e-05, + "loss": 0.9589, + "step": 2258 + }, + { + "epoch": 0.38, + "learning_rate": 1.9952278709395855e-05, + "loss": 0.9891, + "step": 2259 + }, + { + "epoch": 0.38, + "learning_rate": 1.9951747427971e-05, + "loss": 0.8324, + "step": 2260 + }, + { + "epoch": 0.38, + "learning_rate": 1.9951213212666993e-05, + "loss": 0.3682, + "step": 2261 + }, + { + "epoch": 0.38, + "learning_rate": 1.9950676063641325e-05, + "loss": 0.9716, + "step": 2262 + }, + { + "epoch": 0.38, + "learning_rate": 1.9950135981052353e-05, + "loss": 0.9935, + "step": 2263 + }, + { + "epoch": 0.38, + "learning_rate": 1.99495929650593e-05, + "loss": 0.9983, + "step": 2264 + }, + { + "epoch": 0.38, + "learning_rate": 1.994904701582225e-05, + "loss": 0.8988, + "step": 2265 + }, + { + "epoch": 0.38, + "learning_rate": 1.9948498133502155e-05, + "loss": 1.0271, + "step": 2266 + }, + { + "epoch": 0.38, + "learning_rate": 1.9947946318260834e-05, + "loss": 0.9087, + "step": 2267 + }, + { + "epoch": 0.38, + "learning_rate": 1.9947391570260964e-05, + "loss": 0.965, + "step": 2268 + }, + { + "epoch": 0.38, + "learning_rate": 1.994683388966609e-05, + "loss": 0.9843, + "step": 2269 + }, + { + "epoch": 0.38, + "learning_rate": 1.994627327664063e-05, + "loss": 1.0084, + "step": 2270 + }, + { + "epoch": 0.38, + "learning_rate": 1.9945709731349852e-05, + "loss": 0.9343, + "step": 2271 + }, + { + "epoch": 0.38, + "learning_rate": 1.9945143253959896e-05, + "loss": 0.908, + "step": 2272 + }, + { + "epoch": 0.38, + "learning_rate": 1.9944573844637763e-05, + "loss": 0.9273, + "step": 2273 + }, + { + "epoch": 0.38, + "learning_rate": 1.9944001503551326e-05, + "loss": 0.9376, + "step": 2274 + }, + { + "epoch": 0.38, + "learning_rate": 1.9943426230869313e-05, + "loss": 0.9519, + "step": 2275 + }, + { + "epoch": 0.38, + 
"learning_rate": 1.9942848026761324e-05, + "loss": 0.9095, + "step": 2276 + }, + { + "epoch": 0.38, + "learning_rate": 1.9942266891397817e-05, + "loss": 0.9652, + "step": 2277 + }, + { + "epoch": 0.38, + "learning_rate": 1.9941682824950117e-05, + "loss": 0.8928, + "step": 2278 + }, + { + "epoch": 0.38, + "learning_rate": 1.9941095827590415e-05, + "loss": 0.964, + "step": 2279 + }, + { + "epoch": 0.38, + "learning_rate": 1.994050589949176e-05, + "loss": 0.963, + "step": 2280 + }, + { + "epoch": 0.38, + "learning_rate": 1.9939913040828073e-05, + "loss": 0.9221, + "step": 2281 + }, + { + "epoch": 0.38, + "learning_rate": 1.9939317251774134e-05, + "loss": 1.0532, + "step": 2282 + }, + { + "epoch": 0.38, + "learning_rate": 1.9938718532505584e-05, + "loss": 0.3635, + "step": 2283 + }, + { + "epoch": 0.38, + "learning_rate": 1.993811688319894e-05, + "loss": 0.9531, + "step": 2284 + }, + { + "epoch": 0.38, + "learning_rate": 1.993751230403156e-05, + "loss": 0.9654, + "step": 2285 + }, + { + "epoch": 0.38, + "learning_rate": 1.9936904795181696e-05, + "loss": 0.9415, + "step": 2286 + }, + { + "epoch": 0.38, + "learning_rate": 1.993629435682844e-05, + "loss": 0.9963, + "step": 2287 + }, + { + "epoch": 0.38, + "learning_rate": 1.993568098915176e-05, + "loss": 0.9559, + "step": 2288 + }, + { + "epoch": 0.38, + "learning_rate": 1.9935064692332476e-05, + "loss": 1.0141, + "step": 2289 + }, + { + "epoch": 0.38, + "learning_rate": 1.9934445466552283e-05, + "loss": 1.0247, + "step": 2290 + }, + { + "epoch": 0.38, + "learning_rate": 1.993382331199374e-05, + "loss": 0.8953, + "step": 2291 + }, + { + "epoch": 0.38, + "learning_rate": 1.9933198228840254e-05, + "loss": 0.9448, + "step": 2292 + }, + { + "epoch": 0.38, + "learning_rate": 1.9932570217276115e-05, + "loss": 0.9222, + "step": 2293 + }, + { + "epoch": 0.38, + "learning_rate": 1.9931939277486463e-05, + "loss": 0.9476, + "step": 2294 + }, + { + "epoch": 0.38, + "learning_rate": 1.9931305409657307e-05, + "loss": 0.9516, + "step": 2295 + }, + { + "epoch": 0.38, + "learning_rate": 1.993066861397552e-05, + "loss": 0.9502, + "step": 2296 + }, + { + "epoch": 0.39, + "learning_rate": 1.9930028890628832e-05, + "loss": 0.9722, + "step": 2297 + }, + { + "epoch": 0.39, + "learning_rate": 1.9929386239805843e-05, + "loss": 0.9744, + "step": 2298 + }, + { + "epoch": 0.39, + "learning_rate": 1.992874066169601e-05, + "loss": 0.9647, + "step": 2299 + }, + { + "epoch": 0.39, + "learning_rate": 1.9928092156489664e-05, + "loss": 0.9929, + "step": 2300 + }, + { + "epoch": 0.39, + "learning_rate": 1.992744072437798e-05, + "loss": 0.9865, + "step": 2301 + }, + { + "epoch": 0.39, + "learning_rate": 1.992678636555301e-05, + "loss": 0.9362, + "step": 2302 + }, + { + "epoch": 0.39, + "learning_rate": 1.9926129080207676e-05, + "loss": 1.0174, + "step": 2303 + }, + { + "epoch": 0.39, + "learning_rate": 1.9925468868535743e-05, + "loss": 0.973, + "step": 2304 + }, + { + "epoch": 0.39, + "learning_rate": 1.9924805730731847e-05, + "loss": 0.9095, + "step": 2305 + }, + { + "epoch": 0.39, + "learning_rate": 1.992413966699149e-05, + "loss": 0.9517, + "step": 2306 + }, + { + "epoch": 0.39, + "learning_rate": 1.992347067751104e-05, + "loss": 0.9935, + "step": 2307 + }, + { + "epoch": 0.39, + "learning_rate": 1.9922798762487715e-05, + "loss": 0.9282, + "step": 2308 + }, + { + "epoch": 0.39, + "learning_rate": 1.9922123922119606e-05, + "loss": 1.0262, + "step": 2309 + }, + { + "epoch": 0.39, + "learning_rate": 1.9921446156605663e-05, + "loss": 1.0159, + "step": 2310 + }, + { + "epoch": 0.39, + 
"learning_rate": 1.99207654661457e-05, + "loss": 0.933, + "step": 2311 + }, + { + "epoch": 0.39, + "learning_rate": 1.9920081850940382e-05, + "loss": 0.9594, + "step": 2312 + }, + { + "epoch": 0.39, + "learning_rate": 1.9919395311191256e-05, + "loss": 0.9754, + "step": 2313 + }, + { + "epoch": 0.39, + "learning_rate": 1.991870584710072e-05, + "loss": 0.9917, + "step": 2314 + }, + { + "epoch": 0.39, + "learning_rate": 1.9918013458872036e-05, + "loss": 0.9498, + "step": 2315 + }, + { + "epoch": 0.39, + "learning_rate": 1.991731814670932e-05, + "loss": 0.9597, + "step": 2316 + }, + { + "epoch": 0.39, + "learning_rate": 1.9916619910817564e-05, + "loss": 0.9465, + "step": 2317 + }, + { + "epoch": 0.39, + "learning_rate": 1.9915918751402615e-05, + "loss": 0.9025, + "step": 2318 + }, + { + "epoch": 0.39, + "learning_rate": 1.991521466867118e-05, + "loss": 1.0017, + "step": 2319 + }, + { + "epoch": 0.39, + "learning_rate": 1.991450766283083e-05, + "loss": 0.9464, + "step": 2320 + }, + { + "epoch": 0.39, + "learning_rate": 1.991379773409e-05, + "loss": 0.957, + "step": 2321 + }, + { + "epoch": 0.39, + "learning_rate": 1.9913084882657978e-05, + "loss": 0.9471, + "step": 2322 + }, + { + "epoch": 0.39, + "learning_rate": 1.9912369108744927e-05, + "loss": 0.9593, + "step": 2323 + }, + { + "epoch": 0.39, + "learning_rate": 1.9911650412561862e-05, + "loss": 0.9432, + "step": 2324 + }, + { + "epoch": 0.39, + "learning_rate": 1.991092879432066e-05, + "loss": 0.9745, + "step": 2325 + }, + { + "epoch": 0.39, + "learning_rate": 1.9910204254234068e-05, + "loss": 0.3915, + "step": 2326 + }, + { + "epoch": 0.39, + "learning_rate": 1.990947679251568e-05, + "loss": 0.9173, + "step": 2327 + }, + { + "epoch": 0.39, + "learning_rate": 1.990874640937997e-05, + "loss": 0.3916, + "step": 2328 + }, + { + "epoch": 0.39, + "learning_rate": 1.990801310504225e-05, + "loss": 1.0295, + "step": 2329 + }, + { + "epoch": 0.39, + "learning_rate": 1.9907276879718715e-05, + "loss": 0.9363, + "step": 2330 + }, + { + "epoch": 0.39, + "learning_rate": 1.9906537733626407e-05, + "loss": 0.992, + "step": 2331 + }, + { + "epoch": 0.39, + "learning_rate": 1.9905795666983234e-05, + "loss": 1.0234, + "step": 2332 + }, + { + "epoch": 0.39, + "learning_rate": 1.990505068000797e-05, + "loss": 0.9253, + "step": 2333 + }, + { + "epoch": 0.39, + "learning_rate": 1.9904302772920238e-05, + "loss": 0.9243, + "step": 2334 + }, + { + "epoch": 0.39, + "learning_rate": 1.9903551945940537e-05, + "loss": 0.9584, + "step": 2335 + }, + { + "epoch": 0.39, + "learning_rate": 1.990279819929021e-05, + "loss": 0.9585, + "step": 2336 + }, + { + "epoch": 0.39, + "learning_rate": 1.990204153319147e-05, + "loss": 0.9256, + "step": 2337 + }, + { + "epoch": 0.39, + "learning_rate": 1.9901281947867402e-05, + "loss": 0.9351, + "step": 2338 + }, + { + "epoch": 0.39, + "learning_rate": 1.9900519443541927e-05, + "loss": 0.9837, + "step": 2339 + }, + { + "epoch": 0.39, + "learning_rate": 1.9899754020439843e-05, + "loss": 0.9696, + "step": 2340 + }, + { + "epoch": 0.39, + "learning_rate": 1.9898985678786802e-05, + "loss": 0.9366, + "step": 2341 + }, + { + "epoch": 0.39, + "learning_rate": 1.989821441880933e-05, + "loss": 0.9727, + "step": 2342 + }, + { + "epoch": 0.39, + "learning_rate": 1.989744024073479e-05, + "loss": 0.9224, + "step": 2343 + }, + { + "epoch": 0.39, + "learning_rate": 1.9896663144791423e-05, + "loss": 0.9892, + "step": 2344 + }, + { + "epoch": 0.39, + "learning_rate": 1.9895883131208325e-05, + "loss": 0.9737, + "step": 2345 + }, + { + "epoch": 0.39, + 
"learning_rate": 1.989510020021545e-05, + "loss": 0.911, + "step": 2346 + }, + { + "epoch": 0.39, + "learning_rate": 1.989431435204362e-05, + "loss": 0.9366, + "step": 2347 + }, + { + "epoch": 0.39, + "learning_rate": 1.98935255869245e-05, + "loss": 0.9455, + "step": 2348 + }, + { + "epoch": 0.39, + "learning_rate": 1.9892733905090642e-05, + "loss": 0.929, + "step": 2349 + }, + { + "epoch": 0.39, + "learning_rate": 1.989193930677543e-05, + "loss": 0.96, + "step": 2350 + }, + { + "epoch": 0.39, + "learning_rate": 1.989114179221312e-05, + "loss": 0.9422, + "step": 2351 + }, + { + "epoch": 0.39, + "learning_rate": 1.9890341361638837e-05, + "loss": 0.9305, + "step": 2352 + }, + { + "epoch": 0.39, + "learning_rate": 1.9889538015288545e-05, + "loss": 0.9295, + "step": 2353 + }, + { + "epoch": 0.39, + "learning_rate": 1.988873175339909e-05, + "loss": 1.0277, + "step": 2354 + }, + { + "epoch": 0.39, + "learning_rate": 1.9887922576208155e-05, + "loss": 0.8651, + "step": 2355 + }, + { + "epoch": 0.39, + "learning_rate": 1.9887110483954303e-05, + "loss": 0.9895, + "step": 2356 + }, + { + "epoch": 0.4, + "learning_rate": 1.988629547687694e-05, + "loss": 0.9396, + "step": 2357 + }, + { + "epoch": 0.4, + "learning_rate": 1.9885477555216346e-05, + "loss": 0.4074, + "step": 2358 + }, + { + "epoch": 0.4, + "learning_rate": 1.9884656719213645e-05, + "loss": 0.9467, + "step": 2359 + }, + { + "epoch": 0.4, + "learning_rate": 1.9883832969110837e-05, + "loss": 0.9542, + "step": 2360 + }, + { + "epoch": 0.4, + "learning_rate": 1.9883006305150767e-05, + "loss": 0.9153, + "step": 2361 + }, + { + "epoch": 0.4, + "learning_rate": 1.9882176727577143e-05, + "loss": 0.986, + "step": 2362 + }, + { + "epoch": 0.4, + "learning_rate": 1.9881344236634537e-05, + "loss": 0.9601, + "step": 2363 + }, + { + "epoch": 0.4, + "learning_rate": 1.988050883256837e-05, + "loss": 1.0071, + "step": 2364 + }, + { + "epoch": 0.4, + "learning_rate": 1.9879670515624937e-05, + "loss": 0.9565, + "step": 2365 + }, + { + "epoch": 0.4, + "learning_rate": 1.9878829286051374e-05, + "loss": 0.9445, + "step": 2366 + }, + { + "epoch": 0.4, + "learning_rate": 1.9877985144095687e-05, + "loss": 0.9301, + "step": 2367 + }, + { + "epoch": 0.4, + "learning_rate": 1.987713809000674e-05, + "loss": 0.923, + "step": 2368 + }, + { + "epoch": 0.4, + "learning_rate": 1.9876288124034252e-05, + "loss": 0.936, + "step": 2369 + }, + { + "epoch": 0.4, + "learning_rate": 1.9875435246428798e-05, + "loss": 0.9398, + "step": 2370 + }, + { + "epoch": 0.4, + "learning_rate": 1.9874579457441824e-05, + "loss": 0.9691, + "step": 2371 + }, + { + "epoch": 0.4, + "learning_rate": 1.9873720757325613e-05, + "loss": 0.9124, + "step": 2372 + }, + { + "epoch": 0.4, + "learning_rate": 1.9872859146333333e-05, + "loss": 1.0072, + "step": 2373 + }, + { + "epoch": 0.4, + "learning_rate": 1.9871994624718985e-05, + "loss": 0.9989, + "step": 2374 + }, + { + "epoch": 0.4, + "learning_rate": 1.987112719273745e-05, + "loss": 0.9425, + "step": 2375 + }, + { + "epoch": 0.4, + "learning_rate": 1.987025685064444e-05, + "loss": 0.4132, + "step": 2376 + }, + { + "epoch": 0.4, + "learning_rate": 1.9869383598696555e-05, + "loss": 0.9367, + "step": 2377 + }, + { + "epoch": 0.4, + "learning_rate": 1.9868507437151235e-05, + "loss": 0.997, + "step": 2378 + }, + { + "epoch": 0.4, + "learning_rate": 1.9867628366266776e-05, + "loss": 0.4003, + "step": 2379 + }, + { + "epoch": 0.4, + "learning_rate": 1.9866746386302345e-05, + "loss": 0.9467, + "step": 2380 + }, + { + "epoch": 0.4, + "learning_rate": 
1.9865861497517957e-05, + "loss": 0.934, + "step": 2381 + }, + { + "epoch": 0.4, + "learning_rate": 1.9864973700174483e-05, + "loss": 0.9725, + "step": 2382 + }, + { + "epoch": 0.4, + "learning_rate": 1.9864082994533658e-05, + "loss": 0.3884, + "step": 2383 + }, + { + "epoch": 0.4, + "learning_rate": 1.9863189380858068e-05, + "loss": 0.3816, + "step": 2384 + }, + { + "epoch": 0.4, + "learning_rate": 1.9862292859411164e-05, + "loss": 0.9582, + "step": 2385 + }, + { + "epoch": 0.4, + "learning_rate": 1.9861393430457248e-05, + "loss": 0.9836, + "step": 2386 + }, + { + "epoch": 0.4, + "learning_rate": 1.986049109426148e-05, + "loss": 0.3854, + "step": 2387 + }, + { + "epoch": 0.4, + "learning_rate": 1.985958585108988e-05, + "loss": 0.9439, + "step": 2388 + }, + { + "epoch": 0.4, + "learning_rate": 1.9858677701209324e-05, + "loss": 0.8711, + "step": 2389 + }, + { + "epoch": 0.4, + "learning_rate": 1.985776664488754e-05, + "loss": 0.9656, + "step": 2390 + }, + { + "epoch": 0.4, + "learning_rate": 1.985685268239312e-05, + "loss": 1.0065, + "step": 2391 + }, + { + "epoch": 0.4, + "learning_rate": 1.9855935813995504e-05, + "loss": 0.9616, + "step": 2392 + }, + { + "epoch": 0.4, + "learning_rate": 1.9855016039965006e-05, + "loss": 0.3499, + "step": 2393 + }, + { + "epoch": 0.4, + "learning_rate": 1.9854093360572775e-05, + "loss": 0.9801, + "step": 2394 + }, + { + "epoch": 0.4, + "learning_rate": 1.9853167776090832e-05, + "loss": 0.9592, + "step": 2395 + }, + { + "epoch": 0.4, + "learning_rate": 1.9852239286792047e-05, + "loss": 0.3963, + "step": 2396 + }, + { + "epoch": 0.4, + "learning_rate": 1.9851307892950146e-05, + "loss": 0.9769, + "step": 2397 + }, + { + "epoch": 0.4, + "learning_rate": 1.9850373594839717e-05, + "loss": 0.9566, + "step": 2398 + }, + { + "epoch": 0.4, + "learning_rate": 1.98494363927362e-05, + "loss": 0.9542, + "step": 2399 + }, + { + "epoch": 0.4, + "learning_rate": 1.9848496286915894e-05, + "loss": 0.8902, + "step": 2400 + }, + { + "epoch": 0.4, + "learning_rate": 1.9847553277655948e-05, + "loss": 0.9706, + "step": 2401 + }, + { + "epoch": 0.4, + "learning_rate": 1.984660736523437e-05, + "loss": 0.9829, + "step": 2402 + }, + { + "epoch": 0.4, + "learning_rate": 1.984565854993003e-05, + "loss": 0.9643, + "step": 2403 + }, + { + "epoch": 0.4, + "learning_rate": 1.9844706832022646e-05, + "loss": 0.9744, + "step": 2404 + }, + { + "epoch": 0.4, + "learning_rate": 1.9843752211792792e-05, + "loss": 0.9221, + "step": 2405 + }, + { + "epoch": 0.4, + "learning_rate": 1.9842794689521905e-05, + "loss": 0.9784, + "step": 2406 + }, + { + "epoch": 0.4, + "learning_rate": 1.984183426549227e-05, + "loss": 0.9264, + "step": 2407 + }, + { + "epoch": 0.4, + "learning_rate": 1.984087093998703e-05, + "loss": 1.0185, + "step": 2408 + }, + { + "epoch": 0.4, + "learning_rate": 1.9839904713290186e-05, + "loss": 0.9137, + "step": 2409 + }, + { + "epoch": 0.4, + "learning_rate": 1.9838935585686586e-05, + "loss": 0.9184, + "step": 2410 + }, + { + "epoch": 0.4, + "learning_rate": 1.9837963557461945e-05, + "loss": 0.9418, + "step": 2411 + }, + { + "epoch": 0.4, + "learning_rate": 1.983698862890282e-05, + "loss": 0.9684, + "step": 2412 + }, + { + "epoch": 0.4, + "learning_rate": 1.9836010800296642e-05, + "loss": 0.97, + "step": 2413 + }, + { + "epoch": 0.4, + "learning_rate": 1.9835030071931672e-05, + "loss": 1.0213, + "step": 2414 + }, + { + "epoch": 0.4, + "learning_rate": 1.9834046444097046e-05, + "loss": 0.9374, + "step": 2415 + }, + { + "epoch": 0.41, + "learning_rate": 1.9833059917082744e-05, + 
"loss": 0.9585, + "step": 2416 + }, + { + "epoch": 0.41, + "learning_rate": 1.983207049117961e-05, + "loss": 0.9834, + "step": 2417 + }, + { + "epoch": 0.41, + "learning_rate": 1.9831078166679334e-05, + "loss": 0.9665, + "step": 2418 + }, + { + "epoch": 0.41, + "learning_rate": 1.9830082943874464e-05, + "loss": 0.9404, + "step": 2419 + }, + { + "epoch": 0.41, + "learning_rate": 1.98290848230584e-05, + "loss": 0.959, + "step": 2420 + }, + { + "epoch": 0.41, + "learning_rate": 1.98280838045254e-05, + "loss": 0.4105, + "step": 2421 + }, + { + "epoch": 0.41, + "learning_rate": 1.9827079888570573e-05, + "loss": 0.9951, + "step": 2422 + }, + { + "epoch": 0.41, + "learning_rate": 1.982607307548989e-05, + "loss": 0.9295, + "step": 2423 + }, + { + "epoch": 0.41, + "learning_rate": 1.9825063365580165e-05, + "loss": 0.9342, + "step": 2424 + }, + { + "epoch": 0.41, + "learning_rate": 1.9824050759139068e-05, + "loss": 0.9755, + "step": 2425 + }, + { + "epoch": 0.41, + "learning_rate": 1.9823035256465136e-05, + "loss": 0.3381, + "step": 2426 + }, + { + "epoch": 0.41, + "learning_rate": 1.982201685785774e-05, + "loss": 0.8984, + "step": 2427 + }, + { + "epoch": 0.41, + "learning_rate": 1.9820995563617117e-05, + "loss": 0.374, + "step": 2428 + }, + { + "epoch": 0.41, + "learning_rate": 1.9819971374044356e-05, + "loss": 1.0316, + "step": 2429 + }, + { + "epoch": 0.41, + "learning_rate": 1.9818944289441402e-05, + "loss": 0.9626, + "step": 2430 + }, + { + "epoch": 0.41, + "learning_rate": 1.9817914310111044e-05, + "loss": 0.9471, + "step": 2431 + }, + { + "epoch": 0.41, + "learning_rate": 1.981688143635694e-05, + "loss": 0.9814, + "step": 2432 + }, + { + "epoch": 0.41, + "learning_rate": 1.981584566848358e-05, + "loss": 0.9779, + "step": 2433 + }, + { + "epoch": 0.41, + "learning_rate": 1.981480700679633e-05, + "loss": 0.9415, + "step": 2434 + }, + { + "epoch": 0.41, + "learning_rate": 1.981376545160139e-05, + "loss": 0.8894, + "step": 2435 + }, + { + "epoch": 0.41, + "learning_rate": 1.9812721003205825e-05, + "loss": 0.9091, + "step": 2436 + }, + { + "epoch": 0.41, + "learning_rate": 1.981167366191755e-05, + "loss": 0.8746, + "step": 2437 + }, + { + "epoch": 0.41, + "learning_rate": 1.981062342804533e-05, + "loss": 0.9332, + "step": 2438 + }, + { + "epoch": 0.41, + "learning_rate": 1.980957030189879e-05, + "loss": 0.9291, + "step": 2439 + }, + { + "epoch": 0.41, + "learning_rate": 1.9808514283788398e-05, + "loss": 0.9427, + "step": 2440 + }, + { + "epoch": 0.41, + "learning_rate": 1.9807455374025477e-05, + "loss": 0.9238, + "step": 2441 + }, + { + "epoch": 0.41, + "learning_rate": 1.980639357292221e-05, + "loss": 0.395, + "step": 2442 + }, + { + "epoch": 0.41, + "learning_rate": 1.9805328880791626e-05, + "loss": 0.9814, + "step": 2443 + }, + { + "epoch": 0.41, + "learning_rate": 1.9804261297947606e-05, + "loss": 1.0104, + "step": 2444 + }, + { + "epoch": 0.41, + "learning_rate": 1.980319082470488e-05, + "loss": 1.0043, + "step": 2445 + }, + { + "epoch": 0.41, + "learning_rate": 1.9802117461379047e-05, + "loss": 0.9862, + "step": 2446 + }, + { + "epoch": 0.41, + "learning_rate": 1.9801041208286535e-05, + "loss": 0.8913, + "step": 2447 + }, + { + "epoch": 0.41, + "learning_rate": 1.9799962065744634e-05, + "loss": 0.9603, + "step": 2448 + }, + { + "epoch": 0.41, + "learning_rate": 1.9798880034071493e-05, + "loss": 0.9365, + "step": 2449 + }, + { + "epoch": 0.41, + "learning_rate": 1.9797795113586104e-05, + "loss": 0.988, + "step": 2450 + }, + { + "epoch": 0.41, + "learning_rate": 1.9796707304608314e-05, + 
"loss": 1.0031, + "step": 2451 + }, + { + "epoch": 0.41, + "learning_rate": 1.9795616607458817e-05, + "loss": 0.9317, + "step": 2452 + }, + { + "epoch": 0.41, + "learning_rate": 1.9794523022459168e-05, + "loss": 1.0148, + "step": 2453 + }, + { + "epoch": 0.41, + "learning_rate": 1.9793426549931757e-05, + "loss": 0.923, + "step": 2454 + }, + { + "epoch": 0.41, + "learning_rate": 1.9792327190199843e-05, + "loss": 0.981, + "step": 2455 + }, + { + "epoch": 0.41, + "learning_rate": 1.979122494358753e-05, + "loss": 0.9461, + "step": 2456 + }, + { + "epoch": 0.41, + "learning_rate": 1.979011981041977e-05, + "loss": 0.9498, + "step": 2457 + }, + { + "epoch": 0.41, + "learning_rate": 1.9789011791022365e-05, + "loss": 0.9581, + "step": 2458 + }, + { + "epoch": 0.41, + "learning_rate": 1.978790088572197e-05, + "loss": 0.9866, + "step": 2459 + }, + { + "epoch": 0.41, + "learning_rate": 1.9786787094846097e-05, + "loss": 0.9887, + "step": 2460 + }, + { + "epoch": 0.41, + "learning_rate": 1.97856704187231e-05, + "loss": 0.3509, + "step": 2461 + }, + { + "epoch": 0.41, + "learning_rate": 1.978455085768219e-05, + "loss": 0.9697, + "step": 2462 + }, + { + "epoch": 0.41, + "learning_rate": 1.9783428412053422e-05, + "loss": 1.0128, + "step": 2463 + }, + { + "epoch": 0.41, + "learning_rate": 1.9782303082167705e-05, + "loss": 0.9576, + "step": 2464 + }, + { + "epoch": 0.41, + "learning_rate": 1.9781174868356797e-05, + "loss": 0.9048, + "step": 2465 + }, + { + "epoch": 0.41, + "learning_rate": 1.9780043770953306e-05, + "loss": 0.9354, + "step": 2466 + }, + { + "epoch": 0.41, + "learning_rate": 1.9778909790290697e-05, + "loss": 0.9295, + "step": 2467 + }, + { + "epoch": 0.41, + "learning_rate": 1.9777772926703276e-05, + "loss": 0.9285, + "step": 2468 + }, + { + "epoch": 0.41, + "learning_rate": 1.97766331805262e-05, + "loss": 0.9504, + "step": 2469 + }, + { + "epoch": 0.41, + "learning_rate": 1.9775490552095485e-05, + "loss": 0.902, + "step": 2470 + }, + { + "epoch": 0.41, + "learning_rate": 1.9774345041747982e-05, + "loss": 0.9668, + "step": 2471 + }, + { + "epoch": 0.41, + "learning_rate": 1.9773196649821405e-05, + "loss": 0.9486, + "step": 2472 + }, + { + "epoch": 0.41, + "learning_rate": 1.9772045376654308e-05, + "loss": 0.3669, + "step": 2473 + }, + { + "epoch": 0.41, + "learning_rate": 1.97708912225861e-05, + "loss": 0.9302, + "step": 2474 + }, + { + "epoch": 0.41, + "learning_rate": 1.976973418795704e-05, + "loss": 0.9274, + "step": 2475 + }, + { + "epoch": 0.42, + "learning_rate": 1.9768574273108228e-05, + "loss": 0.9719, + "step": 2476 + }, + { + "epoch": 0.42, + "learning_rate": 1.9767411478381623e-05, + "loss": 0.9209, + "step": 2477 + }, + { + "epoch": 0.42, + "learning_rate": 1.9766245804120032e-05, + "loss": 0.9466, + "step": 2478 + }, + { + "epoch": 0.42, + "learning_rate": 1.9765077250667104e-05, + "loss": 0.9783, + "step": 2479 + }, + { + "epoch": 0.42, + "learning_rate": 1.976390581836734e-05, + "loss": 0.9468, + "step": 2480 + }, + { + "epoch": 0.42, + "learning_rate": 1.9762731507566094e-05, + "loss": 0.9075, + "step": 2481 + }, + { + "epoch": 0.42, + "learning_rate": 1.976155431860956e-05, + "loss": 0.9629, + "step": 2482 + }, + { + "epoch": 0.42, + "learning_rate": 1.9760374251844793e-05, + "loss": 0.984, + "step": 2483 + }, + { + "epoch": 0.42, + "learning_rate": 1.9759191307619677e-05, + "loss": 0.9634, + "step": 2484 + }, + { + "epoch": 0.42, + "learning_rate": 1.975800548628297e-05, + "loss": 0.9311, + "step": 2485 + }, + { + "epoch": 0.42, + "learning_rate": 1.975681678818426e-05, + 
"loss": 0.9785, + "step": 2486 + }, + { + "epoch": 0.42, + "learning_rate": 1.9755625213673984e-05, + "loss": 0.367, + "step": 2487 + }, + { + "epoch": 0.42, + "learning_rate": 1.9754430763103428e-05, + "loss": 0.9318, + "step": 2488 + }, + { + "epoch": 0.42, + "learning_rate": 1.975323343682474e-05, + "loss": 1.0139, + "step": 2489 + }, + { + "epoch": 0.42, + "learning_rate": 1.9752033235190895e-05, + "loss": 0.9671, + "step": 2490 + }, + { + "epoch": 0.42, + "learning_rate": 1.9750830158555728e-05, + "loss": 0.3675, + "step": 2491 + }, + { + "epoch": 0.42, + "learning_rate": 1.9749624207273916e-05, + "loss": 0.9081, + "step": 2492 + }, + { + "epoch": 0.42, + "learning_rate": 1.9748415381700988e-05, + "loss": 0.9577, + "step": 2493 + }, + { + "epoch": 0.42, + "learning_rate": 1.974720368219332e-05, + "loss": 0.8928, + "step": 2494 + }, + { + "epoch": 0.42, + "learning_rate": 1.9745989109108134e-05, + "loss": 0.9397, + "step": 2495 + }, + { + "epoch": 0.42, + "learning_rate": 1.9744771662803496e-05, + "loss": 1.007, + "step": 2496 + }, + { + "epoch": 0.42, + "learning_rate": 1.9743551343638324e-05, + "loss": 0.965, + "step": 2497 + }, + { + "epoch": 0.42, + "learning_rate": 1.974232815197238e-05, + "loss": 0.9845, + "step": 2498 + }, + { + "epoch": 0.42, + "learning_rate": 1.974110208816627e-05, + "loss": 0.9891, + "step": 2499 + }, + { + "epoch": 0.42, + "learning_rate": 1.9739873152581456e-05, + "loss": 0.971, + "step": 2500 + }, + { + "epoch": 0.42, + "learning_rate": 1.9738641345580237e-05, + "loss": 0.9494, + "step": 2501 + }, + { + "epoch": 0.42, + "learning_rate": 1.9737406667525766e-05, + "loss": 0.9321, + "step": 2502 + }, + { + "epoch": 0.42, + "learning_rate": 1.973616911878204e-05, + "loss": 0.935, + "step": 2503 + }, + { + "epoch": 0.42, + "learning_rate": 1.9734928699713897e-05, + "loss": 0.9318, + "step": 2504 + }, + { + "epoch": 0.42, + "learning_rate": 1.9733685410687027e-05, + "loss": 0.937, + "step": 2505 + }, + { + "epoch": 0.42, + "learning_rate": 1.9732439252067967e-05, + "loss": 0.978, + "step": 2506 + }, + { + "epoch": 0.42, + "learning_rate": 1.973119022422409e-05, + "loss": 0.9845, + "step": 2507 + }, + { + "epoch": 0.42, + "learning_rate": 1.9729938327523635e-05, + "loss": 0.9328, + "step": 2508 + }, + { + "epoch": 0.42, + "learning_rate": 1.9728683562335663e-05, + "loss": 0.8828, + "step": 2509 + }, + { + "epoch": 0.42, + "learning_rate": 1.97274259290301e-05, + "loss": 0.3788, + "step": 2510 + }, + { + "epoch": 0.42, + "learning_rate": 1.97261654279777e-05, + "loss": 0.9514, + "step": 2511 + }, + { + "epoch": 0.42, + "learning_rate": 1.972490205955008e-05, + "loss": 0.9182, + "step": 2512 + }, + { + "epoch": 0.42, + "learning_rate": 1.972363582411969e-05, + "loss": 0.9666, + "step": 2513 + }, + { + "epoch": 0.42, + "learning_rate": 1.9722366722059836e-05, + "loss": 0.9653, + "step": 2514 + }, + { + "epoch": 0.42, + "learning_rate": 1.9721094753744655e-05, + "loss": 0.8975, + "step": 2515 + }, + { + "epoch": 0.42, + "learning_rate": 1.9719819919549138e-05, + "loss": 0.9182, + "step": 2516 + }, + { + "epoch": 0.42, + "learning_rate": 1.9718542219849122e-05, + "loss": 0.9711, + "step": 2517 + }, + { + "epoch": 0.42, + "learning_rate": 1.9717261655021286e-05, + "loss": 0.9501, + "step": 2518 + }, + { + "epoch": 0.42, + "learning_rate": 1.9715978225443147e-05, + "loss": 0.9267, + "step": 2519 + }, + { + "epoch": 0.42, + "learning_rate": 1.9714691931493087e-05, + "loss": 1.0092, + "step": 2520 + }, + { + "epoch": 0.42, + "learning_rate": 1.9713402773550307e-05, + 
"loss": 0.9643, + "step": 2521 + }, + { + "epoch": 0.42, + "learning_rate": 1.971211075199487e-05, + "loss": 0.9492, + "step": 2522 + }, + { + "epoch": 0.42, + "learning_rate": 1.971081586720767e-05, + "loss": 0.9993, + "step": 2523 + }, + { + "epoch": 0.42, + "learning_rate": 1.9709518119570465e-05, + "loss": 0.967, + "step": 2524 + }, + { + "epoch": 0.42, + "learning_rate": 1.9708217509465837e-05, + "loss": 0.917, + "step": 2525 + }, + { + "epoch": 0.42, + "learning_rate": 1.970691403727722e-05, + "loss": 1.004, + "step": 2526 + }, + { + "epoch": 0.42, + "learning_rate": 1.970560770338889e-05, + "loss": 0.8732, + "step": 2527 + }, + { + "epoch": 0.42, + "learning_rate": 1.9704298508185973e-05, + "loss": 0.968, + "step": 2528 + }, + { + "epoch": 0.42, + "learning_rate": 1.9702986452054426e-05, + "loss": 0.9081, + "step": 2529 + }, + { + "epoch": 0.42, + "learning_rate": 1.9701671535381064e-05, + "loss": 0.9975, + "step": 2530 + }, + { + "epoch": 0.42, + "learning_rate": 1.9700353758553536e-05, + "loss": 0.8707, + "step": 2531 + }, + { + "epoch": 0.42, + "learning_rate": 1.9699033121960333e-05, + "loss": 0.9222, + "step": 2532 + }, + { + "epoch": 0.42, + "learning_rate": 1.9697709625990793e-05, + "loss": 1.0052, + "step": 2533 + }, + { + "epoch": 0.42, + "learning_rate": 1.96963832710351e-05, + "loss": 0.3556, + "step": 2534 + }, + { + "epoch": 0.42, + "learning_rate": 1.969505405748428e-05, + "loss": 0.9385, + "step": 2535 + }, + { + "epoch": 0.43, + "learning_rate": 1.9693721985730186e-05, + "loss": 0.9508, + "step": 2536 + }, + { + "epoch": 0.43, + "learning_rate": 1.969238705616554e-05, + "loss": 0.9723, + "step": 2537 + }, + { + "epoch": 0.43, + "learning_rate": 1.969104926918389e-05, + "loss": 0.9873, + "step": 2538 + }, + { + "epoch": 0.43, + "learning_rate": 1.9689708625179627e-05, + "loss": 0.9667, + "step": 2539 + }, + { + "epoch": 0.43, + "learning_rate": 1.9688365124547986e-05, + "loss": 0.9177, + "step": 2540 + }, + { + "epoch": 0.43, + "learning_rate": 1.9687018767685048e-05, + "loss": 0.9971, + "step": 2541 + }, + { + "epoch": 0.43, + "learning_rate": 1.9685669554987732e-05, + "loss": 0.8696, + "step": 2542 + }, + { + "epoch": 0.43, + "learning_rate": 1.96843174868538e-05, + "loss": 1.0297, + "step": 2543 + }, + { + "epoch": 0.43, + "learning_rate": 1.9682962563681857e-05, + "loss": 0.9569, + "step": 2544 + }, + { + "epoch": 0.43, + "learning_rate": 1.9681604785871347e-05, + "loss": 1.0142, + "step": 2545 + }, + { + "epoch": 0.43, + "learning_rate": 1.9680244153822558e-05, + "loss": 0.9663, + "step": 2546 + }, + { + "epoch": 0.43, + "learning_rate": 1.9678880667936617e-05, + "loss": 0.9518, + "step": 2547 + }, + { + "epoch": 0.43, + "learning_rate": 1.9677514328615496e-05, + "loss": 0.9759, + "step": 2548 + }, + { + "epoch": 0.43, + "learning_rate": 1.9676145136262007e-05, + "loss": 0.8913, + "step": 2549 + }, + { + "epoch": 0.43, + "learning_rate": 1.9674773091279794e-05, + "loss": 0.9923, + "step": 2550 + }, + { + "epoch": 0.43, + "learning_rate": 1.967339819407336e-05, + "loss": 0.9767, + "step": 2551 + }, + { + "epoch": 0.43, + "learning_rate": 1.9672020445048036e-05, + "loss": 0.967, + "step": 2552 + }, + { + "epoch": 0.43, + "learning_rate": 1.9670639844609997e-05, + "loss": 0.401, + "step": 2553 + }, + { + "epoch": 0.43, + "learning_rate": 1.9669256393166258e-05, + "loss": 0.9252, + "step": 2554 + }, + { + "epoch": 0.43, + "learning_rate": 1.966787009112467e-05, + "loss": 0.9228, + "step": 2555 + }, + { + "epoch": 0.43, + "learning_rate": 1.966648093889394e-05, + 
"loss": 0.935, + "step": 2556 + }, + { + "epoch": 0.43, + "learning_rate": 1.9665088936883596e-05, + "loss": 0.9969, + "step": 2557 + }, + { + "epoch": 0.43, + "learning_rate": 1.966369408550402e-05, + "loss": 0.9334, + "step": 2558 + }, + { + "epoch": 0.43, + "learning_rate": 1.9662296385166422e-05, + "loss": 0.9056, + "step": 2559 + }, + { + "epoch": 0.43, + "learning_rate": 1.9660895836282866e-05, + "loss": 1.0198, + "step": 2560 + }, + { + "epoch": 0.43, + "learning_rate": 1.965949243926624e-05, + "loss": 0.3753, + "step": 2561 + }, + { + "epoch": 0.43, + "learning_rate": 1.965808619453029e-05, + "loss": 0.3492, + "step": 2562 + }, + { + "epoch": 0.43, + "learning_rate": 1.965667710248959e-05, + "loss": 0.956, + "step": 2563 + }, + { + "epoch": 0.43, + "learning_rate": 1.9655265163559547e-05, + "loss": 0.9899, + "step": 2564 + }, + { + "epoch": 0.43, + "learning_rate": 1.965385037815642e-05, + "loss": 0.9704, + "step": 2565 + }, + { + "epoch": 0.43, + "learning_rate": 1.9652432746697306e-05, + "loss": 1.012, + "step": 2566 + }, + { + "epoch": 0.43, + "learning_rate": 1.9651012269600133e-05, + "loss": 0.982, + "step": 2567 + }, + { + "epoch": 0.43, + "learning_rate": 1.9649588947283674e-05, + "loss": 0.9015, + "step": 2568 + }, + { + "epoch": 0.43, + "learning_rate": 1.9648162780167542e-05, + "loss": 0.3723, + "step": 2569 + }, + { + "epoch": 0.43, + "learning_rate": 1.964673376867218e-05, + "loss": 0.9656, + "step": 2570 + }, + { + "epoch": 0.43, + "learning_rate": 1.9645301913218878e-05, + "loss": 0.94, + "step": 2571 + }, + { + "epoch": 0.43, + "learning_rate": 1.9643867214229764e-05, + "loss": 0.8702, + "step": 2572 + }, + { + "epoch": 0.43, + "learning_rate": 1.9642429672127802e-05, + "loss": 0.9663, + "step": 2573 + }, + { + "epoch": 0.43, + "learning_rate": 1.9640989287336795e-05, + "loss": 0.9567, + "step": 2574 + }, + { + "epoch": 0.43, + "learning_rate": 1.963954606028138e-05, + "loss": 0.9195, + "step": 2575 + }, + { + "epoch": 0.43, + "learning_rate": 1.9638099991387038e-05, + "loss": 0.9061, + "step": 2576 + }, + { + "epoch": 0.43, + "learning_rate": 1.963665108108008e-05, + "loss": 0.946, + "step": 2577 + }, + { + "epoch": 0.43, + "learning_rate": 1.9635199329787672e-05, + "loss": 0.8839, + "step": 2578 + }, + { + "epoch": 0.43, + "learning_rate": 1.9633744737937796e-05, + "loss": 0.9867, + "step": 2579 + }, + { + "epoch": 0.43, + "learning_rate": 1.9632287305959278e-05, + "loss": 0.9047, + "step": 2580 + }, + { + "epoch": 0.43, + "learning_rate": 1.9630827034281794e-05, + "loss": 1.0044, + "step": 2581 + }, + { + "epoch": 0.43, + "learning_rate": 1.962936392333584e-05, + "loss": 0.9764, + "step": 2582 + }, + { + "epoch": 0.43, + "learning_rate": 1.962789797355276e-05, + "loss": 0.9407, + "step": 2583 + }, + { + "epoch": 0.43, + "learning_rate": 1.962642918536473e-05, + "loss": 0.8837, + "step": 2584 + }, + { + "epoch": 0.43, + "learning_rate": 1.9624957559204763e-05, + "loss": 0.986, + "step": 2585 + }, + { + "epoch": 0.43, + "learning_rate": 1.962348309550671e-05, + "loss": 0.9217, + "step": 2586 + }, + { + "epoch": 0.43, + "learning_rate": 1.962200579470526e-05, + "loss": 0.9772, + "step": 2587 + }, + { + "epoch": 0.43, + "learning_rate": 1.9620525657235938e-05, + "loss": 0.9755, + "step": 2588 + }, + { + "epoch": 0.43, + "learning_rate": 1.96190426835351e-05, + "loss": 0.9525, + "step": 2589 + }, + { + "epoch": 0.43, + "learning_rate": 1.9617556874039945e-05, + "loss": 0.9224, + "step": 2590 + }, + { + "epoch": 0.43, + "learning_rate": 1.9616068229188507e-05, + "loss": 
0.9234, + "step": 2591 + }, + { + "epoch": 0.43, + "learning_rate": 1.961457674941965e-05, + "loss": 0.9612, + "step": 2592 + }, + { + "epoch": 0.43, + "learning_rate": 1.9613082435173078e-05, + "loss": 0.9174, + "step": 2593 + }, + { + "epoch": 0.43, + "learning_rate": 1.961158528688933e-05, + "loss": 0.3895, + "step": 2594 + }, + { + "epoch": 0.44, + "learning_rate": 1.9610085305009784e-05, + "loss": 0.9426, + "step": 2595 + }, + { + "epoch": 0.44, + "learning_rate": 1.960858248997665e-05, + "loss": 0.9471, + "step": 2596 + }, + { + "epoch": 0.44, + "learning_rate": 1.960707684223297e-05, + "loss": 0.8957, + "step": 2597 + }, + { + "epoch": 0.44, + "learning_rate": 1.9605568362222632e-05, + "loss": 0.9395, + "step": 2598 + }, + { + "epoch": 0.44, + "learning_rate": 1.9604057050390342e-05, + "loss": 1.0202, + "step": 2599 + }, + { + "epoch": 0.44, + "learning_rate": 1.9602542907181657e-05, + "loss": 0.9667, + "step": 2600 + }, + { + "epoch": 0.44, + "learning_rate": 1.9601025933042962e-05, + "loss": 0.3977, + "step": 2601 + }, + { + "epoch": 0.44, + "learning_rate": 1.959950612842147e-05, + "loss": 0.3566, + "step": 2602 + }, + { + "epoch": 0.44, + "learning_rate": 1.959798349376525e-05, + "loss": 0.8882, + "step": 2603 + }, + { + "epoch": 0.44, + "learning_rate": 1.9596458029523174e-05, + "loss": 0.9412, + "step": 2604 + }, + { + "epoch": 0.44, + "learning_rate": 1.9594929736144978e-05, + "loss": 0.9594, + "step": 2605 + }, + { + "epoch": 0.44, + "learning_rate": 1.9593398614081206e-05, + "loss": 0.9418, + "step": 2606 + }, + { + "epoch": 0.44, + "learning_rate": 1.959186466378326e-05, + "loss": 0.9005, + "step": 2607 + }, + { + "epoch": 0.44, + "learning_rate": 1.959032788570336e-05, + "loss": 0.8764, + "step": 2608 + }, + { + "epoch": 0.44, + "learning_rate": 1.9588788280294568e-05, + "loss": 0.9584, + "step": 2609 + }, + { + "epoch": 0.44, + "learning_rate": 1.9587245848010766e-05, + "loss": 0.9285, + "step": 2610 + }, + { + "epoch": 0.44, + "learning_rate": 1.9585700589306688e-05, + "loss": 0.9275, + "step": 2611 + }, + { + "epoch": 0.44, + "learning_rate": 1.9584152504637893e-05, + "loss": 1.0371, + "step": 2612 + }, + { + "epoch": 0.44, + "learning_rate": 1.9582601594460767e-05, + "loss": 0.872, + "step": 2613 + }, + { + "epoch": 0.44, + "learning_rate": 1.958104785923254e-05, + "loss": 0.9135, + "step": 2614 + }, + { + "epoch": 0.44, + "learning_rate": 1.9579491299411263e-05, + "loss": 0.9338, + "step": 2615 + }, + { + "epoch": 0.44, + "learning_rate": 1.9577931915455827e-05, + "loss": 0.9528, + "step": 2616 + }, + { + "epoch": 0.44, + "learning_rate": 1.9576369707825962e-05, + "loss": 0.9411, + "step": 2617 + }, + { + "epoch": 0.44, + "learning_rate": 1.9574804676982215e-05, + "loss": 0.9629, + "step": 2618 + }, + { + "epoch": 0.44, + "learning_rate": 1.957323682338598e-05, + "loss": 0.9608, + "step": 2619 + }, + { + "epoch": 0.44, + "learning_rate": 1.957166614749947e-05, + "loss": 0.965, + "step": 2620 + }, + { + "epoch": 0.44, + "learning_rate": 1.9570092649785746e-05, + "loss": 1.0813, + "step": 2621 + }, + { + "epoch": 0.44, + "learning_rate": 1.956851633070868e-05, + "loss": 0.9416, + "step": 2622 + }, + { + "epoch": 0.44, + "learning_rate": 1.9566937190732994e-05, + "loss": 0.9794, + "step": 2623 + }, + { + "epoch": 0.44, + "learning_rate": 1.9565355230324238e-05, + "loss": 0.9312, + "step": 2624 + }, + { + "epoch": 0.44, + "learning_rate": 1.9563770449948782e-05, + "loss": 0.9261, + "step": 2625 + }, + { + "epoch": 0.44, + "learning_rate": 1.9562182850073844e-05, + 
"loss": 0.9316, + "step": 2626 + }, + { + "epoch": 0.44, + "learning_rate": 1.9560592431167456e-05, + "loss": 0.9132, + "step": 2627 + }, + { + "epoch": 0.44, + "learning_rate": 1.95589991936985e-05, + "loss": 0.9181, + "step": 2628 + }, + { + "epoch": 0.44, + "learning_rate": 1.9557403138136672e-05, + "loss": 0.9343, + "step": 2629 + }, + { + "epoch": 0.44, + "learning_rate": 1.955580426495251e-05, + "loss": 0.969, + "step": 2630 + }, + { + "epoch": 0.44, + "learning_rate": 1.9554202574617375e-05, + "loss": 0.9437, + "step": 2631 + }, + { + "epoch": 0.44, + "learning_rate": 1.9552598067603466e-05, + "loss": 0.9352, + "step": 2632 + }, + { + "epoch": 0.44, + "learning_rate": 1.9550990744383805e-05, + "loss": 0.8712, + "step": 2633 + }, + { + "epoch": 0.44, + "learning_rate": 1.954938060543225e-05, + "loss": 0.9724, + "step": 2634 + }, + { + "epoch": 0.44, + "learning_rate": 1.9547767651223486e-05, + "loss": 0.3813, + "step": 2635 + }, + { + "epoch": 0.44, + "learning_rate": 1.954615188223303e-05, + "loss": 0.3876, + "step": 2636 + }, + { + "epoch": 0.44, + "learning_rate": 1.9544533298937222e-05, + "loss": 0.9905, + "step": 2637 + }, + { + "epoch": 0.44, + "learning_rate": 1.9542911901813247e-05, + "loss": 0.8834, + "step": 2638 + }, + { + "epoch": 0.44, + "learning_rate": 1.95412876913391e-05, + "loss": 0.9874, + "step": 2639 + }, + { + "epoch": 0.44, + "learning_rate": 1.953966066799362e-05, + "loss": 0.9311, + "step": 2640 + }, + { + "epoch": 0.44, + "learning_rate": 1.9538030832256468e-05, + "loss": 0.9861, + "step": 2641 + }, + { + "epoch": 0.44, + "learning_rate": 1.9536398184608143e-05, + "loss": 0.9528, + "step": 2642 + }, + { + "epoch": 0.44, + "learning_rate": 1.953476272552996e-05, + "loss": 0.9695, + "step": 2643 + }, + { + "epoch": 0.44, + "learning_rate": 1.9533124455504073e-05, + "loss": 0.943, + "step": 2644 + }, + { + "epoch": 0.44, + "learning_rate": 1.9531483375013457e-05, + "loss": 0.413, + "step": 2645 + }, + { + "epoch": 0.44, + "learning_rate": 1.9529839484541925e-05, + "loss": 0.9668, + "step": 2646 + }, + { + "epoch": 0.44, + "learning_rate": 1.952819278457411e-05, + "loss": 0.9493, + "step": 2647 + }, + { + "epoch": 0.44, + "learning_rate": 1.9526543275595476e-05, + "loss": 0.8851, + "step": 2648 + }, + { + "epoch": 0.44, + "learning_rate": 1.952489095809232e-05, + "loss": 0.9738, + "step": 2649 + }, + { + "epoch": 0.44, + "learning_rate": 1.9523235832551752e-05, + "loss": 0.9229, + "step": 2650 + }, + { + "epoch": 0.44, + "learning_rate": 1.9521577899461732e-05, + "loss": 0.9545, + "step": 2651 + }, + { + "epoch": 0.44, + "learning_rate": 1.951991715931103e-05, + "loss": 1.0013, + "step": 2652 + }, + { + "epoch": 0.44, + "learning_rate": 1.951825361258925e-05, + "loss": 0.9736, + "step": 2653 + }, + { + "epoch": 0.44, + "learning_rate": 1.9516587259786824e-05, + "loss": 0.9226, + "step": 2654 + }, + { + "epoch": 0.45, + "learning_rate": 1.9514918101395012e-05, + "loss": 0.8739, + "step": 2655 + }, + { + "epoch": 0.45, + "learning_rate": 1.9513246137905896e-05, + "loss": 0.9813, + "step": 2656 + }, + { + "epoch": 0.45, + "learning_rate": 1.9511571369812388e-05, + "loss": 0.9201, + "step": 2657 + }, + { + "epoch": 0.45, + "learning_rate": 1.9509893797608228e-05, + "loss": 0.8617, + "step": 2658 + }, + { + "epoch": 0.45, + "learning_rate": 1.9508213421787987e-05, + "loss": 0.9511, + "step": 2659 + }, + { + "epoch": 0.45, + "learning_rate": 1.950653024284705e-05, + "loss": 0.9142, + "step": 2660 + }, + { + "epoch": 0.45, + "learning_rate": 1.9504844261281638e-05, + 
"loss": 0.9426, + "step": 2661 + }, + { + "epoch": 0.45, + "learning_rate": 1.9503155477588794e-05, + "loss": 0.9584, + "step": 2662 + }, + { + "epoch": 0.45, + "learning_rate": 1.9501463892266396e-05, + "loss": 0.9256, + "step": 2663 + }, + { + "epoch": 0.45, + "learning_rate": 1.949976950581314e-05, + "loss": 1.0026, + "step": 2664 + }, + { + "epoch": 0.45, + "learning_rate": 1.949807231872854e-05, + "loss": 0.9367, + "step": 2665 + }, + { + "epoch": 0.45, + "learning_rate": 1.949637233151295e-05, + "loss": 0.8926, + "step": 2666 + }, + { + "epoch": 0.45, + "learning_rate": 1.9494669544667545e-05, + "loss": 0.9203, + "step": 2667 + }, + { + "epoch": 0.45, + "learning_rate": 1.9492963958694326e-05, + "loss": 0.903, + "step": 2668 + }, + { + "epoch": 0.45, + "learning_rate": 1.949125557409611e-05, + "loss": 0.9363, + "step": 2669 + }, + { + "epoch": 0.45, + "learning_rate": 1.9489544391376555e-05, + "loss": 0.9117, + "step": 2670 + }, + { + "epoch": 0.45, + "learning_rate": 1.948783041104013e-05, + "loss": 0.9372, + "step": 2671 + }, + { + "epoch": 0.45, + "learning_rate": 1.9486113633592136e-05, + "loss": 0.9746, + "step": 2672 + }, + { + "epoch": 0.45, + "learning_rate": 1.94843940595387e-05, + "loss": 0.949, + "step": 2673 + }, + { + "epoch": 0.45, + "learning_rate": 1.9482671689386766e-05, + "loss": 0.8996, + "step": 2674 + }, + { + "epoch": 0.45, + "learning_rate": 1.948094652364411e-05, + "loss": 0.9907, + "step": 2675 + }, + { + "epoch": 0.45, + "learning_rate": 1.9479218562819326e-05, + "loss": 0.9541, + "step": 2676 + }, + { + "epoch": 0.45, + "learning_rate": 1.9477487807421837e-05, + "loss": 0.8952, + "step": 2677 + }, + { + "epoch": 0.45, + "learning_rate": 1.9475754257961887e-05, + "loss": 0.9636, + "step": 2678 + }, + { + "epoch": 0.45, + "learning_rate": 1.9474017914950546e-05, + "loss": 0.9576, + "step": 2679 + }, + { + "epoch": 0.45, + "learning_rate": 1.9472278778899704e-05, + "loss": 0.9581, + "step": 2680 + }, + { + "epoch": 0.45, + "learning_rate": 1.9470536850322076e-05, + "loss": 0.9824, + "step": 2681 + }, + { + "epoch": 0.45, + "learning_rate": 1.94687921297312e-05, + "loss": 0.9101, + "step": 2682 + }, + { + "epoch": 0.45, + "learning_rate": 1.9467044617641445e-05, + "loss": 0.9833, + "step": 2683 + }, + { + "epoch": 0.45, + "learning_rate": 1.9465294314567987e-05, + "loss": 0.9043, + "step": 2684 + }, + { + "epoch": 0.45, + "learning_rate": 1.946354122102684e-05, + "loss": 0.9171, + "step": 2685 + }, + { + "epoch": 0.45, + "learning_rate": 1.946178533753483e-05, + "loss": 0.9205, + "step": 2686 + }, + { + "epoch": 0.45, + "learning_rate": 1.946002666460961e-05, + "loss": 0.9658, + "step": 2687 + }, + { + "epoch": 0.45, + "learning_rate": 1.9458265202769656e-05, + "loss": 0.8765, + "step": 2688 + }, + { + "epoch": 0.45, + "learning_rate": 1.945650095253427e-05, + "loss": 0.9151, + "step": 2689 + }, + { + "epoch": 0.45, + "learning_rate": 1.945473391442356e-05, + "loss": 0.9146, + "step": 2690 + }, + { + "epoch": 0.45, + "learning_rate": 1.9452964088958483e-05, + "loss": 0.9246, + "step": 2691 + }, + { + "epoch": 0.45, + "learning_rate": 1.945119147666079e-05, + "loss": 0.9732, + "step": 2692 + }, + { + "epoch": 0.45, + "learning_rate": 1.9449416078053067e-05, + "loss": 0.9274, + "step": 2693 + }, + { + "epoch": 0.45, + "learning_rate": 1.9447637893658727e-05, + "loss": 0.9394, + "step": 2694 + }, + { + "epoch": 0.45, + "learning_rate": 1.944585692400199e-05, + "loss": 0.9431, + "step": 2695 + }, + { + "epoch": 0.45, + "learning_rate": 1.9444073169607907e-05, + 
"loss": 0.9507, + "step": 2696 + }, + { + "epoch": 0.45, + "learning_rate": 1.9442286631002348e-05, + "loss": 0.9409, + "step": 2697 + }, + { + "epoch": 0.45, + "learning_rate": 1.9440497308712e-05, + "loss": 0.9612, + "step": 2698 + }, + { + "epoch": 0.45, + "learning_rate": 1.943870520326438e-05, + "loss": 0.8676, + "step": 2699 + }, + { + "epoch": 0.45, + "learning_rate": 1.9436910315187815e-05, + "loss": 0.9805, + "step": 2700 + }, + { + "epoch": 0.45, + "learning_rate": 1.9435112645011462e-05, + "loss": 0.9083, + "step": 2701 + }, + { + "epoch": 0.45, + "learning_rate": 1.943331219326528e-05, + "loss": 0.9918, + "step": 2702 + }, + { + "epoch": 0.45, + "learning_rate": 1.9431508960480075e-05, + "loss": 0.9995, + "step": 2703 + }, + { + "epoch": 0.45, + "learning_rate": 1.9429702947187455e-05, + "loss": 0.8881, + "step": 2704 + }, + { + "epoch": 0.45, + "learning_rate": 1.942789415391985e-05, + "loss": 0.8812, + "step": 2705 + }, + { + "epoch": 0.45, + "learning_rate": 1.942608258121051e-05, + "loss": 0.9597, + "step": 2706 + }, + { + "epoch": 0.45, + "learning_rate": 1.9424268229593507e-05, + "loss": 0.8788, + "step": 2707 + }, + { + "epoch": 0.45, + "learning_rate": 1.942245109960373e-05, + "loss": 0.9262, + "step": 2708 + }, + { + "epoch": 0.45, + "learning_rate": 1.9420631191776892e-05, + "loss": 0.9958, + "step": 2709 + }, + { + "epoch": 0.45, + "learning_rate": 1.9418808506649515e-05, + "loss": 0.9153, + "step": 2710 + }, + { + "epoch": 0.45, + "learning_rate": 1.941698304475895e-05, + "loss": 0.953, + "step": 2711 + }, + { + "epoch": 0.45, + "learning_rate": 1.9415154806643358e-05, + "loss": 0.9333, + "step": 2712 + }, + { + "epoch": 0.45, + "learning_rate": 1.9413323792841726e-05, + "loss": 0.9322, + "step": 2713 + }, + { + "epoch": 0.45, + "learning_rate": 1.941149000389386e-05, + "loss": 1.0025, + "step": 2714 + }, + { + "epoch": 0.46, + "learning_rate": 1.940965344034037e-05, + "loss": 0.9449, + "step": 2715 + }, + { + "epoch": 0.46, + "learning_rate": 1.94078141027227e-05, + "loss": 0.9752, + "step": 2716 + }, + { + "epoch": 0.46, + "learning_rate": 1.940597199158311e-05, + "loss": 0.9238, + "step": 2717 + }, + { + "epoch": 0.46, + "learning_rate": 1.9404127107464662e-05, + "loss": 0.9692, + "step": 2718 + }, + { + "epoch": 0.46, + "learning_rate": 1.9402279450911255e-05, + "loss": 0.934, + "step": 2719 + }, + { + "epoch": 0.46, + "learning_rate": 1.94004290224676e-05, + "loss": 0.9964, + "step": 2720 + }, + { + "epoch": 0.46, + "learning_rate": 1.939857582267922e-05, + "loss": 0.3924, + "step": 2721 + }, + { + "epoch": 0.46, + "learning_rate": 1.9396719852092457e-05, + "loss": 0.998, + "step": 2722 + }, + { + "epoch": 0.46, + "learning_rate": 1.9394861111254467e-05, + "loss": 0.9688, + "step": 2723 + }, + { + "epoch": 0.46, + "learning_rate": 1.939299960071323e-05, + "loss": 1.0011, + "step": 2724 + }, + { + "epoch": 0.46, + "learning_rate": 1.9391135321017542e-05, + "loss": 0.8818, + "step": 2725 + }, + { + "epoch": 0.46, + "learning_rate": 1.938926827271701e-05, + "loss": 0.9579, + "step": 2726 + }, + { + "epoch": 0.46, + "learning_rate": 1.938739845636205e-05, + "loss": 0.9208, + "step": 2727 + }, + { + "epoch": 0.46, + "learning_rate": 1.938552587250392e-05, + "loss": 0.9618, + "step": 2728 + }, + { + "epoch": 0.46, + "learning_rate": 1.9383650521694662e-05, + "loss": 0.9679, + "step": 2729 + }, + { + "epoch": 0.46, + "learning_rate": 1.938177240448716e-05, + "loss": 0.9688, + "step": 2730 + }, + { + "epoch": 0.46, + "learning_rate": 1.9379891521435098e-05, + "loss": 
0.9121, + "step": 2731 + }, + { + "epoch": 0.46, + "learning_rate": 1.9378007873092975e-05, + "loss": 0.3756, + "step": 2732 + }, + { + "epoch": 0.46, + "learning_rate": 1.9376121460016123e-05, + "loss": 0.9452, + "step": 2733 + }, + { + "epoch": 0.46, + "learning_rate": 1.9374232282760664e-05, + "loss": 0.9172, + "step": 2734 + }, + { + "epoch": 0.46, + "learning_rate": 1.937234034188355e-05, + "loss": 0.9467, + "step": 2735 + }, + { + "epoch": 0.46, + "learning_rate": 1.9370445637942552e-05, + "loss": 0.9687, + "step": 2736 + }, + { + "epoch": 0.46, + "learning_rate": 1.9368548171496244e-05, + "loss": 0.8133, + "step": 2737 + }, + { + "epoch": 0.46, + "learning_rate": 1.9366647943104014e-05, + "loss": 0.8829, + "step": 2738 + }, + { + "epoch": 0.46, + "learning_rate": 1.9364744953326077e-05, + "loss": 0.9476, + "step": 2739 + }, + { + "epoch": 0.46, + "learning_rate": 1.936283920272345e-05, + "loss": 0.8584, + "step": 2740 + }, + { + "epoch": 0.46, + "learning_rate": 1.9360930691857966e-05, + "loss": 0.8558, + "step": 2741 + }, + { + "epoch": 0.46, + "learning_rate": 1.935901942129228e-05, + "loss": 0.9791, + "step": 2742 + }, + { + "epoch": 0.46, + "learning_rate": 1.935710539158985e-05, + "loss": 0.9519, + "step": 2743 + }, + { + "epoch": 0.46, + "learning_rate": 1.9355188603314956e-05, + "loss": 0.9498, + "step": 2744 + }, + { + "epoch": 0.46, + "learning_rate": 1.935326905703268e-05, + "loss": 0.9128, + "step": 2745 + }, + { + "epoch": 0.46, + "learning_rate": 1.9351346753308933e-05, + "loss": 0.9418, + "step": 2746 + }, + { + "epoch": 0.46, + "learning_rate": 1.9349421692710428e-05, + "loss": 0.9336, + "step": 2747 + }, + { + "epoch": 0.46, + "learning_rate": 1.9347493875804686e-05, + "loss": 0.967, + "step": 2748 + }, + { + "epoch": 0.46, + "learning_rate": 1.9345563303160056e-05, + "loss": 0.9686, + "step": 2749 + }, + { + "epoch": 0.46, + "learning_rate": 1.9343629975345687e-05, + "loss": 0.9127, + "step": 2750 + }, + { + "epoch": 0.46, + "learning_rate": 1.9341693892931544e-05, + "loss": 0.9319, + "step": 2751 + }, + { + "epoch": 0.46, + "learning_rate": 1.9339755056488405e-05, + "loss": 0.9479, + "step": 2752 + }, + { + "epoch": 0.46, + "learning_rate": 1.9337813466587864e-05, + "loss": 0.966, + "step": 2753 + }, + { + "epoch": 0.46, + "learning_rate": 1.9335869123802313e-05, + "loss": 0.9739, + "step": 2754 + }, + { + "epoch": 0.46, + "learning_rate": 1.933392202870497e-05, + "loss": 0.963, + "step": 2755 + }, + { + "epoch": 0.46, + "learning_rate": 1.9331972181869857e-05, + "loss": 0.9624, + "step": 2756 + }, + { + "epoch": 0.46, + "learning_rate": 1.9330019583871813e-05, + "loss": 0.9385, + "step": 2757 + }, + { + "epoch": 0.46, + "learning_rate": 1.932806423528648e-05, + "loss": 0.8951, + "step": 2758 + }, + { + "epoch": 0.46, + "learning_rate": 1.9326106136690322e-05, + "loss": 0.9148, + "step": 2759 + }, + { + "epoch": 0.46, + "learning_rate": 1.9324145288660595e-05, + "loss": 0.9056, + "step": 2760 + }, + { + "epoch": 0.46, + "learning_rate": 1.9322181691775387e-05, + "loss": 0.9628, + "step": 2761 + }, + { + "epoch": 0.46, + "learning_rate": 1.9320215346613586e-05, + "loss": 0.9258, + "step": 2762 + }, + { + "epoch": 0.46, + "learning_rate": 1.9318246253754886e-05, + "loss": 0.9643, + "step": 2763 + }, + { + "epoch": 0.46, + "learning_rate": 1.93162744137798e-05, + "loss": 0.9188, + "step": 2764 + }, + { + "epoch": 0.46, + "learning_rate": 1.931429982726965e-05, + "loss": 0.9061, + "step": 2765 + }, + { + "epoch": 0.46, + "learning_rate": 1.9312322494806556e-05, + 
"loss": 0.9651, + "step": 2766 + }, + { + "epoch": 0.46, + "learning_rate": 1.9310342416973468e-05, + "loss": 0.9288, + "step": 2767 + }, + { + "epoch": 0.46, + "learning_rate": 1.930835959435412e-05, + "loss": 0.9844, + "step": 2768 + }, + { + "epoch": 0.46, + "learning_rate": 1.9306374027533078e-05, + "loss": 1.0058, + "step": 2769 + }, + { + "epoch": 0.46, + "learning_rate": 1.9304385717095708e-05, + "loss": 0.964, + "step": 2770 + }, + { + "epoch": 0.46, + "learning_rate": 1.930239466362818e-05, + "loss": 0.9395, + "step": 2771 + }, + { + "epoch": 0.46, + "learning_rate": 1.9300400867717484e-05, + "loss": 0.9762, + "step": 2772 + }, + { + "epoch": 0.46, + "learning_rate": 1.9298404329951404e-05, + "loss": 0.9673, + "step": 2773 + }, + { + "epoch": 0.47, + "learning_rate": 1.9296405050918546e-05, + "loss": 0.9442, + "step": 2774 + }, + { + "epoch": 0.47, + "learning_rate": 1.9294403031208317e-05, + "loss": 0.983, + "step": 2775 + }, + { + "epoch": 0.47, + "learning_rate": 1.929239827141093e-05, + "loss": 0.9916, + "step": 2776 + }, + { + "epoch": 0.47, + "learning_rate": 1.929039077211741e-05, + "loss": 0.9585, + "step": 2777 + }, + { + "epoch": 0.47, + "learning_rate": 1.9288380533919597e-05, + "loss": 0.3618, + "step": 2778 + }, + { + "epoch": 0.47, + "learning_rate": 1.928636755741012e-05, + "loss": 0.9229, + "step": 2779 + }, + { + "epoch": 0.47, + "learning_rate": 1.928435184318243e-05, + "loss": 0.9529, + "step": 2780 + }, + { + "epoch": 0.47, + "learning_rate": 1.9282333391830777e-05, + "loss": 0.9489, + "step": 2781 + }, + { + "epoch": 0.47, + "learning_rate": 1.9280312203950228e-05, + "loss": 1.0185, + "step": 2782 + }, + { + "epoch": 0.47, + "learning_rate": 1.9278288280136647e-05, + "loss": 0.9449, + "step": 2783 + }, + { + "epoch": 0.47, + "learning_rate": 1.927626162098671e-05, + "loss": 0.9203, + "step": 2784 + }, + { + "epoch": 0.47, + "learning_rate": 1.9274232227097885e-05, + "loss": 0.9459, + "step": 2785 + }, + { + "epoch": 0.47, + "learning_rate": 1.927220009906848e-05, + "loss": 0.9227, + "step": 2786 + }, + { + "epoch": 0.47, + "learning_rate": 1.927016523749757e-05, + "loss": 0.9312, + "step": 2787 + }, + { + "epoch": 0.47, + "learning_rate": 1.9268127642985058e-05, + "loss": 0.9571, + "step": 2788 + }, + { + "epoch": 0.47, + "learning_rate": 1.9266087316131655e-05, + "loss": 0.8642, + "step": 2789 + }, + { + "epoch": 0.47, + "learning_rate": 1.9264044257538864e-05, + "loss": 0.9786, + "step": 2790 + }, + { + "epoch": 0.47, + "learning_rate": 1.9261998467809e-05, + "loss": 0.9479, + "step": 2791 + }, + { + "epoch": 0.47, + "learning_rate": 1.9259949947545186e-05, + "loss": 0.9755, + "step": 2792 + }, + { + "epoch": 0.47, + "learning_rate": 1.925789869735134e-05, + "loss": 0.9397, + "step": 2793 + }, + { + "epoch": 0.47, + "learning_rate": 1.9255844717832206e-05, + "loss": 0.9668, + "step": 2794 + }, + { + "epoch": 0.47, + "learning_rate": 1.9253788009593308e-05, + "loss": 0.9507, + "step": 2795 + }, + { + "epoch": 0.47, + "learning_rate": 1.9251728573240983e-05, + "loss": 0.9821, + "step": 2796 + }, + { + "epoch": 0.47, + "learning_rate": 1.9249666409382387e-05, + "loss": 0.9102, + "step": 2797 + }, + { + "epoch": 0.47, + "learning_rate": 1.9247601518625454e-05, + "loss": 0.9484, + "step": 2798 + }, + { + "epoch": 0.47, + "learning_rate": 1.9245533901578943e-05, + "loss": 0.9879, + "step": 2799 + }, + { + "epoch": 0.47, + "learning_rate": 1.9243463558852405e-05, + "loss": 0.9688, + "step": 2800 + }, + { + "epoch": 0.47, + "learning_rate": 1.92413904910562e-05, + 
"loss": 0.944, + "step": 2801 + }, + { + "epoch": 0.47, + "learning_rate": 1.9239314698801493e-05, + "loss": 0.9258, + "step": 2802 + }, + { + "epoch": 0.47, + "learning_rate": 1.9237236182700244e-05, + "loss": 0.9894, + "step": 2803 + }, + { + "epoch": 0.47, + "learning_rate": 1.9235154943365224e-05, + "loss": 0.9478, + "step": 2804 + }, + { + "epoch": 0.47, + "learning_rate": 1.9233070981410007e-05, + "loss": 0.8373, + "step": 2805 + }, + { + "epoch": 0.47, + "learning_rate": 1.923098429744896e-05, + "loss": 0.9401, + "step": 2806 + }, + { + "epoch": 0.47, + "learning_rate": 1.9228894892097267e-05, + "loss": 0.9464, + "step": 2807 + }, + { + "epoch": 0.47, + "learning_rate": 1.92268027659709e-05, + "loss": 0.9382, + "step": 2808 + }, + { + "epoch": 0.47, + "learning_rate": 1.9224707919686648e-05, + "loss": 0.9861, + "step": 2809 + }, + { + "epoch": 0.47, + "learning_rate": 1.922261035386208e-05, + "loss": 0.9838, + "step": 2810 + }, + { + "epoch": 0.47, + "learning_rate": 1.9220510069115595e-05, + "loss": 0.9371, + "step": 2811 + }, + { + "epoch": 0.47, + "learning_rate": 1.921840706606637e-05, + "loss": 0.945, + "step": 2812 + }, + { + "epoch": 0.47, + "learning_rate": 1.921630134533439e-05, + "loss": 0.9942, + "step": 2813 + }, + { + "epoch": 0.47, + "learning_rate": 1.9214192907540452e-05, + "loss": 0.9535, + "step": 2814 + }, + { + "epoch": 0.47, + "learning_rate": 1.9212081753306143e-05, + "loss": 0.9513, + "step": 2815 + }, + { + "epoch": 0.47, + "learning_rate": 1.920996788325385e-05, + "loss": 0.9116, + "step": 2816 + }, + { + "epoch": 0.47, + "learning_rate": 1.9207851298006766e-05, + "loss": 0.8856, + "step": 2817 + }, + { + "epoch": 0.47, + "learning_rate": 1.920573199818888e-05, + "loss": 0.9219, + "step": 2818 + }, + { + "epoch": 0.47, + "learning_rate": 1.9203609984424993e-05, + "loss": 0.9478, + "step": 2819 + }, + { + "epoch": 0.47, + "learning_rate": 1.9201485257340683e-05, + "loss": 0.9755, + "step": 2820 + }, + { + "epoch": 0.47, + "learning_rate": 1.9199357817562347e-05, + "loss": 1.0169, + "step": 2821 + }, + { + "epoch": 0.47, + "learning_rate": 1.9197227665717183e-05, + "loss": 1.0045, + "step": 2822 + }, + { + "epoch": 0.47, + "learning_rate": 1.9195094802433175e-05, + "loss": 0.9257, + "step": 2823 + }, + { + "epoch": 0.47, + "learning_rate": 1.9192959228339115e-05, + "loss": 0.9399, + "step": 2824 + }, + { + "epoch": 0.47, + "learning_rate": 1.9190820944064594e-05, + "loss": 0.8806, + "step": 2825 + }, + { + "epoch": 0.47, + "learning_rate": 1.918867995024e-05, + "loss": 0.9966, + "step": 2826 + }, + { + "epoch": 0.47, + "learning_rate": 1.918653624749652e-05, + "loss": 0.9567, + "step": 2827 + }, + { + "epoch": 0.47, + "learning_rate": 1.918438983646614e-05, + "loss": 0.9786, + "step": 2828 + }, + { + "epoch": 0.47, + "learning_rate": 1.9182240717781642e-05, + "loss": 0.9672, + "step": 2829 + }, + { + "epoch": 0.47, + "learning_rate": 1.918008889207661e-05, + "loss": 0.9026, + "step": 2830 + }, + { + "epoch": 0.47, + "learning_rate": 1.917793435998543e-05, + "loss": 0.9362, + "step": 2831 + }, + { + "epoch": 0.47, + "learning_rate": 1.9175777122143273e-05, + "loss": 0.9447, + "step": 2832 + }, + { + "epoch": 0.47, + "learning_rate": 1.917361717918612e-05, + "loss": 0.9201, + "step": 2833 + }, + { + "epoch": 0.48, + "learning_rate": 1.9171454531750745e-05, + "loss": 0.8951, + "step": 2834 + }, + { + "epoch": 0.48, + "learning_rate": 1.9169289180474714e-05, + "loss": 0.9039, + "step": 2835 + }, + { + "epoch": 0.48, + "learning_rate": 1.91671211259964e-05, + 
"loss": 0.8881, + "step": 2836 + }, + { + "epoch": 0.48, + "learning_rate": 1.9164950368954965e-05, + "loss": 0.8945, + "step": 2837 + }, + { + "epoch": 0.48, + "learning_rate": 1.9162776909990375e-05, + "loss": 0.9132, + "step": 2838 + }, + { + "epoch": 0.48, + "learning_rate": 1.9160600749743384e-05, + "loss": 0.9735, + "step": 2839 + }, + { + "epoch": 0.48, + "learning_rate": 1.9158421888855548e-05, + "loss": 0.926, + "step": 2840 + }, + { + "epoch": 0.48, + "learning_rate": 1.9156240327969223e-05, + "loss": 0.951, + "step": 2841 + }, + { + "epoch": 0.48, + "learning_rate": 1.9154056067727548e-05, + "loss": 0.9155, + "step": 2842 + }, + { + "epoch": 0.48, + "learning_rate": 1.915186910877447e-05, + "loss": 0.9118, + "step": 2843 + }, + { + "epoch": 0.48, + "learning_rate": 1.9149679451754726e-05, + "loss": 0.9679, + "step": 2844 + }, + { + "epoch": 0.48, + "learning_rate": 1.9147487097313854e-05, + "loss": 0.9458, + "step": 2845 + }, + { + "epoch": 0.48, + "learning_rate": 1.9145292046098175e-05, + "loss": 0.9915, + "step": 2846 + }, + { + "epoch": 0.48, + "learning_rate": 1.914309429875482e-05, + "loss": 0.8839, + "step": 2847 + }, + { + "epoch": 0.48, + "learning_rate": 1.9140893855931705e-05, + "loss": 0.9345, + "step": 2848 + }, + { + "epoch": 0.48, + "learning_rate": 1.9138690718277542e-05, + "loss": 0.9334, + "step": 2849 + }, + { + "epoch": 0.48, + "learning_rate": 1.913648488644184e-05, + "loss": 0.9549, + "step": 2850 + }, + { + "epoch": 0.48, + "learning_rate": 1.9134276361074907e-05, + "loss": 0.9275, + "step": 2851 + }, + { + "epoch": 0.48, + "learning_rate": 1.9132065142827834e-05, + "loss": 0.3941, + "step": 2852 + }, + { + "epoch": 0.48, + "learning_rate": 1.912985123235251e-05, + "loss": 0.9448, + "step": 2853 + }, + { + "epoch": 0.48, + "learning_rate": 1.912763463030162e-05, + "loss": 0.9497, + "step": 2854 + }, + { + "epoch": 0.48, + "learning_rate": 1.9125415337328644e-05, + "loss": 0.9704, + "step": 2855 + }, + { + "epoch": 0.48, + "learning_rate": 1.912319335408785e-05, + "loss": 0.9884, + "step": 2856 + }, + { + "epoch": 0.48, + "learning_rate": 1.9120968681234303e-05, + "loss": 0.9049, + "step": 2857 + }, + { + "epoch": 0.48, + "learning_rate": 1.9118741319423862e-05, + "loss": 0.9574, + "step": 2858 + }, + { + "epoch": 0.48, + "learning_rate": 1.9116511269313173e-05, + "loss": 1.0218, + "step": 2859 + }, + { + "epoch": 0.48, + "learning_rate": 1.9114278531559677e-05, + "loss": 0.908, + "step": 2860 + }, + { + "epoch": 0.48, + "learning_rate": 1.9112043106821612e-05, + "loss": 0.9503, + "step": 2861 + }, + { + "epoch": 0.48, + "learning_rate": 1.9109804995758003e-05, + "loss": 0.8831, + "step": 2862 + }, + { + "epoch": 0.48, + "learning_rate": 1.910756419902867e-05, + "loss": 0.9376, + "step": 2863 + }, + { + "epoch": 0.48, + "learning_rate": 1.910532071729422e-05, + "loss": 0.9306, + "step": 2864 + }, + { + "epoch": 0.48, + "learning_rate": 1.9103074551216058e-05, + "loss": 0.9332, + "step": 2865 + }, + { + "epoch": 0.48, + "learning_rate": 1.9100825701456376e-05, + "loss": 0.9612, + "step": 2866 + }, + { + "epoch": 0.48, + "learning_rate": 1.909857416867816e-05, + "loss": 0.951, + "step": 2867 + }, + { + "epoch": 0.48, + "learning_rate": 1.9096319953545186e-05, + "loss": 0.9885, + "step": 2868 + }, + { + "epoch": 0.48, + "learning_rate": 1.9094063056722015e-05, + "loss": 0.9423, + "step": 2869 + }, + { + "epoch": 0.48, + "learning_rate": 1.9091803478874008e-05, + "loss": 1.0025, + "step": 2870 + }, + { + "epoch": 0.48, + "learning_rate": 1.9089541220667312e-05, 
+ "loss": 0.9652, + "step": 2871 + }, + { + "epoch": 0.48, + "learning_rate": 1.908727628276886e-05, + "loss": 0.9949, + "step": 2872 + }, + { + "epoch": 0.48, + "learning_rate": 1.908500866584639e-05, + "loss": 0.9006, + "step": 2873 + }, + { + "epoch": 0.48, + "learning_rate": 1.9082738370568407e-05, + "loss": 0.9715, + "step": 2874 + }, + { + "epoch": 0.48, + "learning_rate": 1.908046539760422e-05, + "loss": 1.0639, + "step": 2875 + }, + { + "epoch": 0.48, + "learning_rate": 1.9078189747623934e-05, + "loss": 0.944, + "step": 2876 + }, + { + "epoch": 0.48, + "learning_rate": 1.9075911421298425e-05, + "loss": 0.92, + "step": 2877 + }, + { + "epoch": 0.48, + "learning_rate": 1.9073630419299373e-05, + "loss": 0.9796, + "step": 2878 + }, + { + "epoch": 0.48, + "learning_rate": 1.9071346742299238e-05, + "loss": 0.9441, + "step": 2879 + }, + { + "epoch": 0.48, + "learning_rate": 1.9069060390971273e-05, + "loss": 0.9185, + "step": 2880 + }, + { + "epoch": 0.48, + "learning_rate": 1.9066771365989515e-05, + "loss": 1.0064, + "step": 2881 + }, + { + "epoch": 0.48, + "learning_rate": 1.90644796680288e-05, + "loss": 0.9798, + "step": 2882 + }, + { + "epoch": 0.48, + "learning_rate": 1.906218529776474e-05, + "loss": 0.9294, + "step": 2883 + }, + { + "epoch": 0.48, + "learning_rate": 1.905988825587374e-05, + "loss": 0.9472, + "step": 2884 + }, + { + "epoch": 0.48, + "learning_rate": 1.9057588543032988e-05, + "loss": 0.9803, + "step": 2885 + }, + { + "epoch": 0.48, + "learning_rate": 1.9055286159920466e-05, + "loss": 0.9296, + "step": 2886 + }, + { + "epoch": 0.48, + "learning_rate": 1.9052981107214947e-05, + "loss": 0.9495, + "step": 2887 + }, + { + "epoch": 0.48, + "learning_rate": 1.9050673385595977e-05, + "loss": 0.9411, + "step": 2888 + }, + { + "epoch": 0.48, + "learning_rate": 1.9048362995743898e-05, + "loss": 0.9445, + "step": 2889 + }, + { + "epoch": 0.48, + "learning_rate": 1.9046049938339837e-05, + "loss": 0.9258, + "step": 2890 + }, + { + "epoch": 0.48, + "learning_rate": 1.904373421406571e-05, + "loss": 0.8742, + "step": 2891 + }, + { + "epoch": 0.48, + "learning_rate": 1.9041415823604214e-05, + "loss": 0.3869, + "step": 2892 + }, + { + "epoch": 0.48, + "learning_rate": 1.9039094767638834e-05, + "loss": 0.9186, + "step": 2893 + }, + { + "epoch": 0.49, + "learning_rate": 1.9036771046853845e-05, + "loss": 0.9707, + "step": 2894 + }, + { + "epoch": 0.49, + "learning_rate": 1.9034444661934302e-05, + "loss": 0.9436, + "step": 2895 + }, + { + "epoch": 0.49, + "learning_rate": 1.9032115613566047e-05, + "loss": 0.9149, + "step": 2896 + }, + { + "epoch": 0.49, + "learning_rate": 1.902978390243571e-05, + "loss": 0.9555, + "step": 2897 + }, + { + "epoch": 0.49, + "learning_rate": 1.9027449529230703e-05, + "loss": 0.9552, + "step": 2898 + }, + { + "epoch": 0.49, + "learning_rate": 1.902511249463922e-05, + "loss": 0.9389, + "step": 2899 + }, + { + "epoch": 0.49, + "learning_rate": 1.9022772799350248e-05, + "loss": 0.9918, + "step": 2900 + }, + { + "epoch": 0.49, + "learning_rate": 1.9020430444053554e-05, + "loss": 0.9139, + "step": 2901 + }, + { + "epoch": 0.49, + "learning_rate": 1.9018085429439683e-05, + "loss": 0.9018, + "step": 2902 + }, + { + "epoch": 0.49, + "learning_rate": 1.9015737756199976e-05, + "loss": 0.9818, + "step": 2903 + }, + { + "epoch": 0.49, + "learning_rate": 1.901338742502655e-05, + "loss": 0.9751, + "step": 2904 + }, + { + "epoch": 0.49, + "learning_rate": 1.9011034436612305e-05, + "loss": 0.9075, + "step": 2905 + }, + { + "epoch": 0.49, + "learning_rate": 
1.9008678791650927e-05, + "loss": 0.9623, + "step": 2906 + }, + { + "epoch": 0.49, + "learning_rate": 1.9006320490836886e-05, + "loss": 0.3593, + "step": 2907 + }, + { + "epoch": 0.49, + "learning_rate": 1.9003959534865437e-05, + "loss": 0.932, + "step": 2908 + }, + { + "epoch": 0.49, + "learning_rate": 1.9001595924432606e-05, + "loss": 0.9624, + "step": 2909 + }, + { + "epoch": 0.49, + "learning_rate": 1.899922966023522e-05, + "loss": 0.971, + "step": 2910 + }, + { + "epoch": 0.49, + "learning_rate": 1.8996860742970872e-05, + "loss": 0.9448, + "step": 2911 + }, + { + "epoch": 0.49, + "learning_rate": 1.8994489173337943e-05, + "loss": 0.9078, + "step": 2912 + }, + { + "epoch": 0.49, + "learning_rate": 1.8992114952035602e-05, + "loss": 0.969, + "step": 2913 + }, + { + "epoch": 0.49, + "learning_rate": 1.8989738079763788e-05, + "loss": 0.9274, + "step": 2914 + }, + { + "epoch": 0.49, + "learning_rate": 1.8987358557223232e-05, + "loss": 0.9903, + "step": 2915 + }, + { + "epoch": 0.49, + "learning_rate": 1.898497638511544e-05, + "loss": 0.9381, + "step": 2916 + }, + { + "epoch": 0.49, + "learning_rate": 1.8982591564142702e-05, + "loss": 0.8965, + "step": 2917 + }, + { + "epoch": 0.49, + "learning_rate": 1.8980204095008087e-05, + "loss": 0.9647, + "step": 2918 + }, + { + "epoch": 0.49, + "learning_rate": 1.897781397841545e-05, + "loss": 0.9274, + "step": 2919 + }, + { + "epoch": 0.49, + "learning_rate": 1.8975421215069416e-05, + "loss": 0.9531, + "step": 2920 + }, + { + "epoch": 0.49, + "learning_rate": 1.8973025805675403e-05, + "loss": 1.0206, + "step": 2921 + }, + { + "epoch": 0.49, + "learning_rate": 1.8970627750939595e-05, + "loss": 0.9606, + "step": 2922 + }, + { + "epoch": 0.49, + "learning_rate": 1.896822705156897e-05, + "loss": 0.862, + "step": 2923 + }, + { + "epoch": 0.49, + "learning_rate": 1.896582370827128e-05, + "loss": 0.9857, + "step": 2924 + }, + { + "epoch": 0.49, + "learning_rate": 1.8963417721755052e-05, + "loss": 0.9206, + "step": 2925 + }, + { + "epoch": 0.49, + "learning_rate": 1.8961009092729598e-05, + "loss": 0.9038, + "step": 2926 + }, + { + "epoch": 0.49, + "learning_rate": 1.8958597821905004e-05, + "loss": 0.9726, + "step": 2927 + }, + { + "epoch": 0.49, + "learning_rate": 1.8956183909992144e-05, + "loss": 0.972, + "step": 2928 + }, + { + "epoch": 0.49, + "learning_rate": 1.8953767357702655e-05, + "loss": 0.917, + "step": 2929 + }, + { + "epoch": 0.49, + "learning_rate": 1.8951348165748973e-05, + "loss": 0.9122, + "step": 2930 + }, + { + "epoch": 0.49, + "learning_rate": 1.894892633484429e-05, + "loss": 0.9144, + "step": 2931 + }, + { + "epoch": 0.49, + "learning_rate": 1.89465018657026e-05, + "loss": 0.9796, + "step": 2932 + }, + { + "epoch": 0.49, + "learning_rate": 1.8944074759038648e-05, + "loss": 0.96, + "step": 2933 + }, + { + "epoch": 0.49, + "learning_rate": 1.894164501556798e-05, + "loss": 0.8908, + "step": 2934 + }, + { + "epoch": 0.49, + "learning_rate": 1.8939212636006902e-05, + "loss": 0.9185, + "step": 2935 + }, + { + "epoch": 0.49, + "learning_rate": 1.8936777621072514e-05, + "loss": 0.9764, + "step": 2936 + }, + { + "epoch": 0.49, + "learning_rate": 1.8934339971482676e-05, + "loss": 0.4364, + "step": 2937 + }, + { + "epoch": 0.49, + "learning_rate": 1.8931899687956038e-05, + "loss": 1.002, + "step": 2938 + }, + { + "epoch": 0.49, + "learning_rate": 1.892945677121202e-05, + "loss": 0.9292, + "step": 2939 + }, + { + "epoch": 0.49, + "learning_rate": 1.892701122197082e-05, + "loss": 0.9533, + "step": 2940 + }, + { + "epoch": 0.49, + "learning_rate": 
1.8924563040953403e-05, + "loss": 0.998, + "step": 2941 + }, + { + "epoch": 0.49, + "learning_rate": 1.8922112228881532e-05, + "loss": 0.3717, + "step": 2942 + }, + { + "epoch": 0.49, + "learning_rate": 1.891965878647772e-05, + "loss": 0.9261, + "step": 2943 + }, + { + "epoch": 0.49, + "learning_rate": 1.8917202714465275e-05, + "loss": 0.9183, + "step": 2944 + }, + { + "epoch": 0.49, + "learning_rate": 1.8914744013568273e-05, + "loss": 0.9345, + "step": 2945 + }, + { + "epoch": 0.49, + "learning_rate": 1.891228268451156e-05, + "loss": 0.9105, + "step": 2946 + }, + { + "epoch": 0.49, + "learning_rate": 1.8909818728020765e-05, + "loss": 0.9159, + "step": 2947 + }, + { + "epoch": 0.49, + "learning_rate": 1.8907352144822285e-05, + "loss": 0.9715, + "step": 2948 + }, + { + "epoch": 0.49, + "learning_rate": 1.8904882935643293e-05, + "loss": 0.947, + "step": 2949 + }, + { + "epoch": 0.49, + "learning_rate": 1.8902411101211747e-05, + "loss": 0.9807, + "step": 2950 + }, + { + "epoch": 0.49, + "learning_rate": 1.889993664225636e-05, + "loss": 0.9759, + "step": 2951 + }, + { + "epoch": 0.49, + "learning_rate": 1.8897459559506632e-05, + "loss": 0.9605, + "step": 2952 + }, + { + "epoch": 0.5, + "learning_rate": 1.8894979853692836e-05, + "loss": 0.9495, + "step": 2953 + }, + { + "epoch": 0.5, + "learning_rate": 1.889249752554601e-05, + "loss": 0.9349, + "step": 2954 + }, + { + "epoch": 0.5, + "learning_rate": 1.889001257579797e-05, + "loss": 0.3604, + "step": 2955 + }, + { + "epoch": 0.5, + "learning_rate": 1.888752500518131e-05, + "loss": 0.8997, + "step": 2956 + }, + { + "epoch": 0.5, + "learning_rate": 1.888503481442939e-05, + "loss": 0.883, + "step": 2957 + }, + { + "epoch": 0.5, + "learning_rate": 1.8882542004276343e-05, + "loss": 0.9827, + "step": 2958 + }, + { + "epoch": 0.5, + "learning_rate": 1.8880046575457072e-05, + "loss": 0.9163, + "step": 2959 + }, + { + "epoch": 0.5, + "learning_rate": 1.8877548528707267e-05, + "loss": 0.9364, + "step": 2960 + }, + { + "epoch": 0.5, + "learning_rate": 1.8875047864763366e-05, + "loss": 0.9225, + "step": 2961 + }, + { + "epoch": 0.5, + "learning_rate": 1.88725445843626e-05, + "loss": 0.9104, + "step": 2962 + }, + { + "epoch": 0.5, + "learning_rate": 1.887003868824295e-05, + "loss": 0.9777, + "step": 2963 + }, + { + "epoch": 0.5, + "learning_rate": 1.8867530177143192e-05, + "loss": 0.9289, + "step": 2964 + }, + { + "epoch": 0.5, + "learning_rate": 1.886501905180286e-05, + "loss": 0.9382, + "step": 2965 + }, + { + "epoch": 0.5, + "learning_rate": 1.8862505312962257e-05, + "loss": 0.9282, + "step": 2966 + }, + { + "epoch": 0.5, + "learning_rate": 1.8859988961362455e-05, + "loss": 0.9608, + "step": 2967 + }, + { + "epoch": 0.5, + "learning_rate": 1.885746999774531e-05, + "loss": 0.9366, + "step": 2968 + }, + { + "epoch": 0.5, + "learning_rate": 1.8854948422853436e-05, + "loss": 0.8856, + "step": 2969 + }, + { + "epoch": 0.5, + "learning_rate": 1.8852424237430215e-05, + "loss": 0.9204, + "step": 2970 + }, + { + "epoch": 0.5, + "learning_rate": 1.884989744221981e-05, + "loss": 0.8802, + "step": 2971 + }, + { + "epoch": 0.5, + "learning_rate": 1.8847368037967138e-05, + "loss": 1.0375, + "step": 2972 + }, + { + "epoch": 0.5, + "learning_rate": 1.8844836025417905e-05, + "loss": 0.9668, + "step": 2973 + }, + { + "epoch": 0.5, + "learning_rate": 1.8842301405318567e-05, + "loss": 0.9636, + "step": 2974 + }, + { + "epoch": 0.5, + "learning_rate": 1.8839764178416354e-05, + "loss": 0.944, + "step": 2975 + }, + { + "epoch": 0.5, + "learning_rate": 1.8837224345459276e-05, + 
"loss": 0.9012, + "step": 2976 + }, + { + "epoch": 0.5, + "learning_rate": 1.8834681907196094e-05, + "loss": 0.4154, + "step": 2977 + }, + { + "epoch": 0.5, + "learning_rate": 1.883213686437635e-05, + "loss": 0.9558, + "step": 2978 + }, + { + "epoch": 0.5, + "learning_rate": 1.882958921775035e-05, + "loss": 0.9491, + "step": 2979 + }, + { + "epoch": 0.5, + "learning_rate": 1.8827038968069163e-05, + "loss": 0.8959, + "step": 2980 + }, + { + "epoch": 0.5, + "learning_rate": 1.8824486116084633e-05, + "loss": 0.9779, + "step": 2981 + }, + { + "epoch": 0.5, + "learning_rate": 1.8821930662549363e-05, + "loss": 0.9519, + "step": 2982 + }, + { + "epoch": 0.5, + "learning_rate": 1.881937260821673e-05, + "loss": 0.8863, + "step": 2983 + }, + { + "epoch": 0.5, + "learning_rate": 1.8816811953840877e-05, + "loss": 0.9392, + "step": 2984 + }, + { + "epoch": 0.5, + "learning_rate": 1.881424870017671e-05, + "loss": 1.0071, + "step": 2985 + }, + { + "epoch": 0.5, + "learning_rate": 1.8811682847979902e-05, + "loss": 0.891, + "step": 2986 + }, + { + "epoch": 0.5, + "learning_rate": 1.8809114398006895e-05, + "loss": 1.0081, + "step": 2987 + }, + { + "epoch": 0.5, + "learning_rate": 1.8806543351014893e-05, + "loss": 0.9813, + "step": 2988 + }, + { + "epoch": 0.5, + "learning_rate": 1.8803969707761866e-05, + "loss": 0.8979, + "step": 2989 + }, + { + "epoch": 0.5, + "learning_rate": 1.8801393469006558e-05, + "loss": 0.9345, + "step": 2990 + }, + { + "epoch": 0.5, + "learning_rate": 1.879881463550846e-05, + "loss": 0.9184, + "step": 2991 + }, + { + "epoch": 0.5, + "learning_rate": 1.879623320802785e-05, + "loss": 0.942, + "step": 2992 + }, + { + "epoch": 0.5, + "learning_rate": 1.8793649187325754e-05, + "loss": 0.9375, + "step": 2993 + }, + { + "epoch": 0.5, + "learning_rate": 1.879106257416397e-05, + "loss": 0.929, + "step": 2994 + }, + { + "epoch": 0.5, + "learning_rate": 1.8788473369305058e-05, + "loss": 0.394, + "step": 2995 + }, + { + "epoch": 0.5, + "learning_rate": 1.8785881573512345e-05, + "loss": 1.0571, + "step": 2996 + }, + { + "epoch": 0.5, + "learning_rate": 1.8783287187549915e-05, + "loss": 0.9497, + "step": 2997 + }, + { + "epoch": 0.5, + "learning_rate": 1.878069021218262e-05, + "loss": 0.9243, + "step": 2998 + }, + { + "epoch": 0.5, + "learning_rate": 1.877809064817608e-05, + "loss": 0.945, + "step": 2999 + }, + { + "epoch": 0.5, + "learning_rate": 1.8775488496296672e-05, + "loss": 0.9662, + "step": 3000 + }, + { + "epoch": 0.5, + "learning_rate": 1.877288375731154e-05, + "loss": 0.9581, + "step": 3001 + }, + { + "epoch": 0.5, + "learning_rate": 1.8770276431988582e-05, + "loss": 0.9373, + "step": 3002 + }, + { + "epoch": 0.5, + "learning_rate": 1.876766652109647e-05, + "loss": 0.3832, + "step": 3003 + }, + { + "epoch": 0.5, + "learning_rate": 1.8765054025404627e-05, + "loss": 0.9375, + "step": 3004 + }, + { + "epoch": 0.5, + "learning_rate": 1.8762438945683252e-05, + "loss": 0.4045, + "step": 3005 + }, + { + "epoch": 0.5, + "learning_rate": 1.875982128270329e-05, + "loss": 0.9953, + "step": 3006 + }, + { + "epoch": 0.5, + "learning_rate": 1.875720103723646e-05, + "loss": 0.9678, + "step": 3007 + }, + { + "epoch": 0.5, + "learning_rate": 1.875457821005524e-05, + "loss": 0.9366, + "step": 3008 + }, + { + "epoch": 0.5, + "learning_rate": 1.8751952801932866e-05, + "loss": 0.9529, + "step": 3009 + }, + { + "epoch": 0.5, + "learning_rate": 1.8749324813643328e-05, + "loss": 0.9903, + "step": 3010 + }, + { + "epoch": 0.5, + "learning_rate": 1.8746694245961395e-05, + "loss": 0.924, + "step": 3011 + }, + { 
+ "epoch": 0.5, + "learning_rate": 1.874406109966258e-05, + "loss": 0.9782, + "step": 3012 + }, + { + "epoch": 0.51, + "learning_rate": 1.874142537552316e-05, + "loss": 0.9762, + "step": 3013 + }, + { + "epoch": 0.51, + "learning_rate": 1.873878707432018e-05, + "loss": 0.952, + "step": 3014 + }, + { + "epoch": 0.51, + "learning_rate": 1.8736146196831433e-05, + "loss": 0.9629, + "step": 3015 + }, + { + "epoch": 0.51, + "learning_rate": 1.8733502743835483e-05, + "loss": 0.8768, + "step": 3016 + }, + { + "epoch": 0.51, + "learning_rate": 1.8730856716111642e-05, + "loss": 0.907, + "step": 3017 + }, + { + "epoch": 0.51, + "learning_rate": 1.8728208114439992e-05, + "loss": 0.3812, + "step": 3018 + }, + { + "epoch": 0.51, + "learning_rate": 1.8725556939601365e-05, + "loss": 0.9751, + "step": 3019 + }, + { + "epoch": 0.51, + "learning_rate": 1.8722903192377355e-05, + "loss": 0.9683, + "step": 3020 + }, + { + "epoch": 0.51, + "learning_rate": 1.872024687355032e-05, + "loss": 0.4154, + "step": 3021 + }, + { + "epoch": 0.51, + "learning_rate": 1.871758798390336e-05, + "loss": 0.9448, + "step": 3022 + }, + { + "epoch": 0.51, + "learning_rate": 1.871492652422035e-05, + "loss": 0.9886, + "step": 3023 + }, + { + "epoch": 0.51, + "learning_rate": 1.8712262495285917e-05, + "loss": 0.9956, + "step": 3024 + }, + { + "epoch": 0.51, + "learning_rate": 1.870959589788544e-05, + "loss": 0.9772, + "step": 3025 + }, + { + "epoch": 0.51, + "learning_rate": 1.8706926732805065e-05, + "loss": 0.9371, + "step": 3026 + }, + { + "epoch": 0.51, + "learning_rate": 1.8704255000831688e-05, + "loss": 0.893, + "step": 3027 + }, + { + "epoch": 0.51, + "learning_rate": 1.8701580702752963e-05, + "loss": 0.971, + "step": 3028 + }, + { + "epoch": 0.51, + "learning_rate": 1.8698903839357304e-05, + "loss": 0.9539, + "step": 3029 + }, + { + "epoch": 0.51, + "learning_rate": 1.869622441143387e-05, + "loss": 0.9183, + "step": 3030 + }, + { + "epoch": 0.51, + "learning_rate": 1.8693542419772593e-05, + "loss": 0.923, + "step": 3031 + }, + { + "epoch": 0.51, + "learning_rate": 1.8690857865164145e-05, + "loss": 0.9386, + "step": 3032 + }, + { + "epoch": 0.51, + "learning_rate": 1.868817074839997e-05, + "loss": 0.9586, + "step": 3033 + }, + { + "epoch": 0.51, + "learning_rate": 1.868548107027225e-05, + "loss": 0.9956, + "step": 3034 + }, + { + "epoch": 0.51, + "learning_rate": 1.8682788831573932e-05, + "loss": 0.9066, + "step": 3035 + }, + { + "epoch": 0.51, + "learning_rate": 1.8680094033098718e-05, + "loss": 0.9208, + "step": 3036 + }, + { + "epoch": 0.51, + "learning_rate": 1.8677396675641062e-05, + "loss": 1.001, + "step": 3037 + }, + { + "epoch": 0.51, + "learning_rate": 1.8674696759996173e-05, + "loss": 0.8942, + "step": 3038 + }, + { + "epoch": 0.51, + "learning_rate": 1.8671994286960014e-05, + "loss": 1.022, + "step": 3039 + }, + { + "epoch": 0.51, + "learning_rate": 1.8669289257329305e-05, + "loss": 0.9191, + "step": 3040 + }, + { + "epoch": 0.51, + "learning_rate": 1.8666581671901513e-05, + "loss": 0.9252, + "step": 3041 + }, + { + "epoch": 0.51, + "learning_rate": 1.866387153147486e-05, + "loss": 0.9399, + "step": 3042 + }, + { + "epoch": 0.51, + "learning_rate": 1.8661158836848333e-05, + "loss": 0.9905, + "step": 3043 + }, + { + "epoch": 0.51, + "learning_rate": 1.8658443588821657e-05, + "loss": 0.9309, + "step": 3044 + }, + { + "epoch": 0.51, + "learning_rate": 1.8655725788195315e-05, + "loss": 0.8677, + "step": 3045 + }, + { + "epoch": 0.51, + "learning_rate": 1.8653005435770546e-05, + "loss": 0.8909, + "step": 3046 + }, + { + 
"epoch": 0.51, + "learning_rate": 1.8650282532349332e-05, + "loss": 0.9567, + "step": 3047 + }, + { + "epoch": 0.51, + "learning_rate": 1.8647557078734423e-05, + "loss": 0.9256, + "step": 3048 + }, + { + "epoch": 0.51, + "learning_rate": 1.8644829075729303e-05, + "loss": 0.9665, + "step": 3049 + }, + { + "epoch": 0.51, + "learning_rate": 1.864209852413822e-05, + "loss": 0.9289, + "step": 3050 + }, + { + "epoch": 0.51, + "learning_rate": 1.863936542476617e-05, + "loss": 0.8635, + "step": 3051 + }, + { + "epoch": 0.51, + "learning_rate": 1.8636629778418894e-05, + "loss": 0.9377, + "step": 3052 + }, + { + "epoch": 0.51, + "learning_rate": 1.863389158590289e-05, + "loss": 0.9185, + "step": 3053 + }, + { + "epoch": 0.51, + "learning_rate": 1.8631150848025414e-05, + "loss": 0.9575, + "step": 3054 + }, + { + "epoch": 0.51, + "learning_rate": 1.862840756559446e-05, + "loss": 0.9247, + "step": 3055 + }, + { + "epoch": 0.51, + "learning_rate": 1.8625661739418767e-05, + "loss": 0.9012, + "step": 3056 + }, + { + "epoch": 0.51, + "learning_rate": 1.8622913370307846e-05, + "loss": 0.9645, + "step": 3057 + }, + { + "epoch": 0.51, + "learning_rate": 1.8620162459071936e-05, + "loss": 0.9357, + "step": 3058 + }, + { + "epoch": 0.51, + "learning_rate": 1.8617409006522042e-05, + "loss": 0.9827, + "step": 3059 + }, + { + "epoch": 0.51, + "learning_rate": 1.8614653013469905e-05, + "loss": 0.9806, + "step": 3060 + }, + { + "epoch": 0.51, + "learning_rate": 1.861189448072802e-05, + "loss": 0.9456, + "step": 3061 + }, + { + "epoch": 0.51, + "learning_rate": 1.8609133409109637e-05, + "loss": 0.8895, + "step": 3062 + }, + { + "epoch": 0.51, + "learning_rate": 1.8606369799428744e-05, + "loss": 0.9519, + "step": 3063 + }, + { + "epoch": 0.51, + "learning_rate": 1.8603603652500085e-05, + "loss": 0.8763, + "step": 3064 + }, + { + "epoch": 0.51, + "learning_rate": 1.860083496913915e-05, + "loss": 0.8641, + "step": 3065 + }, + { + "epoch": 0.51, + "learning_rate": 1.8598063750162166e-05, + "loss": 1.0189, + "step": 3066 + }, + { + "epoch": 0.51, + "learning_rate": 1.859528999638613e-05, + "loss": 0.884, + "step": 3067 + }, + { + "epoch": 0.51, + "learning_rate": 1.8592513708628767e-05, + "loss": 0.9525, + "step": 3068 + }, + { + "epoch": 0.51, + "learning_rate": 1.8589734887708556e-05, + "loss": 0.944, + "step": 3069 + }, + { + "epoch": 0.51, + "learning_rate": 1.8586953534444726e-05, + "loss": 0.983, + "step": 3070 + }, + { + "epoch": 0.51, + "learning_rate": 1.8584169649657244e-05, + "loss": 0.9097, + "step": 3071 + }, + { + "epoch": 0.52, + "learning_rate": 1.8581383234166834e-05, + "loss": 0.9313, + "step": 3072 + }, + { + "epoch": 0.52, + "learning_rate": 1.857859428879495e-05, + "loss": 0.9421, + "step": 3073 + }, + { + "epoch": 0.52, + "learning_rate": 1.8575802814363816e-05, + "loss": 0.9268, + "step": 3074 + }, + { + "epoch": 0.52, + "learning_rate": 1.8573008811696378e-05, + "loss": 0.9749, + "step": 3075 + }, + { + "epoch": 0.52, + "learning_rate": 1.857021228161634e-05, + "loss": 0.9314, + "step": 3076 + }, + { + "epoch": 0.52, + "learning_rate": 1.856741322494815e-05, + "loss": 0.9186, + "step": 3077 + }, + { + "epoch": 0.52, + "learning_rate": 1.8564611642517e-05, + "loss": 0.9698, + "step": 3078 + }, + { + "epoch": 0.52, + "learning_rate": 1.8561807535148818e-05, + "loss": 0.8907, + "step": 3079 + }, + { + "epoch": 0.52, + "learning_rate": 1.8559000903670293e-05, + "loss": 0.973, + "step": 3080 + }, + { + "epoch": 0.52, + "learning_rate": 1.8556191748908845e-05, + "loss": 0.9685, + "step": 3081 + }, + { + 
"epoch": 0.52, + "learning_rate": 1.8553380071692645e-05, + "loss": 0.9351, + "step": 3082 + }, + { + "epoch": 0.52, + "learning_rate": 1.8550565872850602e-05, + "loss": 0.9727, + "step": 3083 + }, + { + "epoch": 0.52, + "learning_rate": 1.8547749153212373e-05, + "loss": 0.9473, + "step": 3084 + }, + { + "epoch": 0.52, + "learning_rate": 1.8544929913608353e-05, + "loss": 1.0132, + "step": 3085 + }, + { + "epoch": 0.52, + "learning_rate": 1.8542108154869686e-05, + "loss": 0.9785, + "step": 3086 + }, + { + "epoch": 0.52, + "learning_rate": 1.8539283877828257e-05, + "loss": 0.9323, + "step": 3087 + }, + { + "epoch": 0.52, + "learning_rate": 1.8536457083316692e-05, + "loss": 0.9503, + "step": 3088 + }, + { + "epoch": 0.52, + "learning_rate": 1.8533627772168362e-05, + "loss": 0.8878, + "step": 3089 + }, + { + "epoch": 0.52, + "learning_rate": 1.853079594521737e-05, + "loss": 0.9094, + "step": 3090 + }, + { + "epoch": 0.52, + "learning_rate": 1.8527961603298572e-05, + "loss": 0.9277, + "step": 3091 + }, + { + "epoch": 0.52, + "learning_rate": 1.8525124747247566e-05, + "loss": 0.9887, + "step": 3092 + }, + { + "epoch": 0.52, + "learning_rate": 1.852228537790068e-05, + "loss": 0.9656, + "step": 3093 + }, + { + "epoch": 0.52, + "learning_rate": 1.851944349609499e-05, + "loss": 0.3616, + "step": 3094 + }, + { + "epoch": 0.52, + "learning_rate": 1.8516599102668324e-05, + "loss": 0.9622, + "step": 3095 + }, + { + "epoch": 0.52, + "learning_rate": 1.8513752198459224e-05, + "loss": 0.8929, + "step": 3096 + }, + { + "epoch": 0.52, + "learning_rate": 1.851090278430699e-05, + "loss": 0.8792, + "step": 3097 + }, + { + "epoch": 0.52, + "learning_rate": 1.850805086105167e-05, + "loss": 0.979, + "step": 3098 + }, + { + "epoch": 0.52, + "learning_rate": 1.850519642953403e-05, + "loss": 0.9619, + "step": 3099 + }, + { + "epoch": 0.52, + "learning_rate": 1.850233949059559e-05, + "loss": 0.9006, + "step": 3100 + }, + { + "epoch": 0.52, + "learning_rate": 1.8499480045078602e-05, + "loss": 0.9821, + "step": 3101 + }, + { + "epoch": 0.52, + "learning_rate": 1.8496618093826064e-05, + "loss": 0.9036, + "step": 3102 + }, + { + "epoch": 0.52, + "learning_rate": 1.849375363768171e-05, + "loss": 0.913, + "step": 3103 + }, + { + "epoch": 0.52, + "learning_rate": 1.8490886677490007e-05, + "loss": 0.8896, + "step": 3104 + }, + { + "epoch": 0.52, + "learning_rate": 1.8488017214096173e-05, + "loss": 0.946, + "step": 3105 + }, + { + "epoch": 0.52, + "learning_rate": 1.8485145248346147e-05, + "loss": 0.4072, + "step": 3106 + }, + { + "epoch": 0.52, + "learning_rate": 1.848227078108662e-05, + "loss": 0.9403, + "step": 3107 + }, + { + "epoch": 0.52, + "learning_rate": 1.847939381316501e-05, + "loss": 0.9093, + "step": 3108 + }, + { + "epoch": 0.52, + "learning_rate": 1.8476514345429485e-05, + "loss": 0.9413, + "step": 3109 + }, + { + "epoch": 0.52, + "learning_rate": 1.8473632378728932e-05, + "loss": 0.9562, + "step": 3110 + }, + { + "epoch": 0.52, + "learning_rate": 1.8470747913912993e-05, + "loss": 0.9385, + "step": 3111 + }, + { + "epoch": 0.52, + "learning_rate": 1.8467860951832035e-05, + "loss": 0.9462, + "step": 3112 + }, + { + "epoch": 0.52, + "learning_rate": 1.8464971493337167e-05, + "loss": 0.9438, + "step": 3113 + }, + { + "epoch": 0.52, + "learning_rate": 1.8462079539280233e-05, + "loss": 0.9891, + "step": 3114 + }, + { + "epoch": 0.52, + "learning_rate": 1.8459185090513802e-05, + "loss": 0.9912, + "step": 3115 + }, + { + "epoch": 0.52, + "learning_rate": 1.8456288147891196e-05, + "loss": 0.9036, + "step": 3116 + }, + { 
+ "epoch": 0.52, + "learning_rate": 1.8453388712266464e-05, + "loss": 0.9844, + "step": 3117 + }, + { + "epoch": 0.52, + "learning_rate": 1.8450486784494384e-05, + "loss": 0.9761, + "step": 3118 + }, + { + "epoch": 0.52, + "learning_rate": 1.844758236543048e-05, + "loss": 1.026, + "step": 3119 + }, + { + "epoch": 0.52, + "learning_rate": 1.8444675455931006e-05, + "loss": 0.9613, + "step": 3120 + }, + { + "epoch": 0.52, + "learning_rate": 1.8441766056852947e-05, + "loss": 0.9278, + "step": 3121 + }, + { + "epoch": 0.52, + "learning_rate": 1.8438854169054022e-05, + "loss": 0.9483, + "step": 3122 + }, + { + "epoch": 0.52, + "learning_rate": 1.8435939793392686e-05, + "loss": 0.9573, + "step": 3123 + }, + { + "epoch": 0.52, + "learning_rate": 1.8433022930728132e-05, + "loss": 0.9068, + "step": 3124 + }, + { + "epoch": 0.52, + "learning_rate": 1.8430103581920278e-05, + "loss": 0.9619, + "step": 3125 + }, + { + "epoch": 0.52, + "learning_rate": 1.842718174782978e-05, + "loss": 0.8825, + "step": 3126 + }, + { + "epoch": 0.52, + "learning_rate": 1.8424257429318027e-05, + "loss": 1.0043, + "step": 3127 + }, + { + "epoch": 0.52, + "learning_rate": 1.8421330627247137e-05, + "loss": 0.9756, + "step": 3128 + }, + { + "epoch": 0.52, + "learning_rate": 1.841840134247996e-05, + "loss": 0.9176, + "step": 3129 + }, + { + "epoch": 0.52, + "learning_rate": 1.8415469575880078e-05, + "loss": 0.9428, + "step": 3130 + }, + { + "epoch": 0.52, + "learning_rate": 1.8412535328311813e-05, + "loss": 0.8248, + "step": 3131 + }, + { + "epoch": 0.53, + "learning_rate": 1.840959860064021e-05, + "loss": 0.9625, + "step": 3132 + }, + { + "epoch": 0.53, + "learning_rate": 1.840665939373104e-05, + "loss": 1.0249, + "step": 3133 + }, + { + "epoch": 0.53, + "learning_rate": 1.8403717708450823e-05, + "loss": 0.9361, + "step": 3134 + }, + { + "epoch": 0.53, + "learning_rate": 1.8400773545666788e-05, + "loss": 0.9221, + "step": 3135 + }, + { + "epoch": 0.53, + "learning_rate": 1.8397826906246913e-05, + "loss": 0.9559, + "step": 3136 + }, + { + "epoch": 0.53, + "learning_rate": 1.8394877791059895e-05, + "loss": 0.9647, + "step": 3137 + }, + { + "epoch": 0.53, + "learning_rate": 1.8391926200975162e-05, + "loss": 0.4195, + "step": 3138 + }, + { + "epoch": 0.53, + "learning_rate": 1.838897213686288e-05, + "loss": 0.8897, + "step": 3139 + }, + { + "epoch": 0.53, + "learning_rate": 1.838601559959393e-05, + "loss": 0.9025, + "step": 3140 + }, + { + "epoch": 0.53, + "learning_rate": 1.8383056590039932e-05, + "loss": 0.8827, + "step": 3141 + }, + { + "epoch": 0.53, + "learning_rate": 1.8380095109073236e-05, + "loss": 0.9844, + "step": 3142 + }, + { + "epoch": 0.53, + "learning_rate": 1.8377131157566917e-05, + "loss": 0.9528, + "step": 3143 + }, + { + "epoch": 0.53, + "learning_rate": 1.8374164736394777e-05, + "loss": 0.9798, + "step": 3144 + }, + { + "epoch": 0.53, + "learning_rate": 1.8371195846431355e-05, + "loss": 0.9385, + "step": 3145 + }, + { + "epoch": 0.53, + "learning_rate": 1.8368224488551898e-05, + "loss": 1.0039, + "step": 3146 + }, + { + "epoch": 0.53, + "learning_rate": 1.83652506636324e-05, + "loss": 0.9326, + "step": 3147 + }, + { + "epoch": 0.53, + "learning_rate": 1.8362274372549577e-05, + "loss": 0.8871, + "step": 3148 + }, + { + "epoch": 0.53, + "learning_rate": 1.835929561618087e-05, + "loss": 0.9088, + "step": 3149 + }, + { + "epoch": 0.53, + "learning_rate": 1.835631439540445e-05, + "loss": 0.9632, + "step": 3150 + }, + { + "epoch": 0.53, + "learning_rate": 1.83533307110992e-05, + "loss": 0.902, + "step": 3151 + }, + { 
+ "epoch": 0.53, + "learning_rate": 1.835034456414476e-05, + "loss": 0.8714, + "step": 3152 + }, + { + "epoch": 0.53, + "learning_rate": 1.8347355955421462e-05, + "loss": 0.8602, + "step": 3153 + }, + { + "epoch": 0.53, + "learning_rate": 1.8344364885810388e-05, + "loss": 0.9519, + "step": 3154 + }, + { + "epoch": 0.53, + "learning_rate": 1.8341371356193334e-05, + "loss": 1.0344, + "step": 3155 + }, + { + "epoch": 0.53, + "learning_rate": 1.8338375367452822e-05, + "loss": 0.9471, + "step": 3156 + }, + { + "epoch": 0.53, + "learning_rate": 1.8335376920472098e-05, + "loss": 0.9648, + "step": 3157 + }, + { + "epoch": 0.53, + "learning_rate": 1.8332376016135146e-05, + "loss": 0.8887, + "step": 3158 + }, + { + "epoch": 0.53, + "learning_rate": 1.832937265532665e-05, + "loss": 0.9448, + "step": 3159 + }, + { + "epoch": 0.53, + "learning_rate": 1.8326366838932048e-05, + "loss": 0.9927, + "step": 3160 + }, + { + "epoch": 0.53, + "learning_rate": 1.8323358567837474e-05, + "loss": 0.3924, + "step": 3161 + }, + { + "epoch": 0.53, + "learning_rate": 1.83203478429298e-05, + "loss": 0.9464, + "step": 3162 + }, + { + "epoch": 0.53, + "learning_rate": 1.831733466509662e-05, + "loss": 0.9716, + "step": 3163 + }, + { + "epoch": 0.53, + "learning_rate": 1.8314319035226254e-05, + "loss": 0.9175, + "step": 3164 + }, + { + "epoch": 0.53, + "learning_rate": 1.8311300954207737e-05, + "loss": 0.9347, + "step": 3165 + }, + { + "epoch": 0.53, + "learning_rate": 1.8308280422930832e-05, + "loss": 0.8972, + "step": 3166 + }, + { + "epoch": 0.53, + "learning_rate": 1.830525744228602e-05, + "loss": 0.9615, + "step": 3167 + }, + { + "epoch": 0.53, + "learning_rate": 1.8302232013164518e-05, + "loss": 0.9039, + "step": 3168 + }, + { + "epoch": 0.53, + "learning_rate": 1.829920413645824e-05, + "loss": 0.9887, + "step": 3169 + }, + { + "epoch": 0.53, + "learning_rate": 1.8296173813059844e-05, + "loss": 0.8777, + "step": 3170 + }, + { + "epoch": 0.53, + "learning_rate": 1.82931410438627e-05, + "loss": 1.0049, + "step": 3171 + }, + { + "epoch": 0.53, + "learning_rate": 1.8290105829760902e-05, + "loss": 0.9121, + "step": 3172 + }, + { + "epoch": 0.53, + "learning_rate": 1.8287068171649257e-05, + "loss": 0.895, + "step": 3173 + }, + { + "epoch": 0.53, + "learning_rate": 1.82840280704233e-05, + "loss": 0.9177, + "step": 3174 + }, + { + "epoch": 0.53, + "learning_rate": 1.828098552697929e-05, + "loss": 0.8369, + "step": 3175 + }, + { + "epoch": 0.53, + "learning_rate": 1.8277940542214195e-05, + "loss": 0.9514, + "step": 3176 + }, + { + "epoch": 0.53, + "learning_rate": 1.827489311702571e-05, + "loss": 0.9095, + "step": 3177 + }, + { + "epoch": 0.53, + "learning_rate": 1.8271843252312248e-05, + "loss": 0.9269, + "step": 3178 + }, + { + "epoch": 0.53, + "learning_rate": 1.8268790948972942e-05, + "loss": 0.9461, + "step": 3179 + }, + { + "epoch": 0.53, + "learning_rate": 1.8265736207907637e-05, + "loss": 0.9459, + "step": 3180 + }, + { + "epoch": 0.53, + "learning_rate": 1.8262679030016913e-05, + "loss": 0.9366, + "step": 3181 + }, + { + "epoch": 0.53, + "learning_rate": 1.825961941620205e-05, + "loss": 0.9501, + "step": 3182 + }, + { + "epoch": 0.53, + "learning_rate": 1.8256557367365057e-05, + "loss": 0.9227, + "step": 3183 + }, + { + "epoch": 0.53, + "learning_rate": 1.8253492884408658e-05, + "loss": 0.9327, + "step": 3184 + }, + { + "epoch": 0.53, + "learning_rate": 1.825042596823629e-05, + "loss": 0.3881, + "step": 3185 + }, + { + "epoch": 0.53, + "learning_rate": 1.824735661975212e-05, + "loss": 0.9978, + "step": 3186 + }, + { + 
"epoch": 0.53, + "learning_rate": 1.824428483986102e-05, + "loss": 0.9381, + "step": 3187 + }, + { + "epoch": 0.53, + "learning_rate": 1.8241210629468586e-05, + "loss": 0.9968, + "step": 3188 + }, + { + "epoch": 0.53, + "learning_rate": 1.823813398948112e-05, + "loss": 0.3967, + "step": 3189 + }, + { + "epoch": 0.53, + "learning_rate": 1.8235054920805653e-05, + "loss": 0.9824, + "step": 3190 + }, + { + "epoch": 0.53, + "learning_rate": 1.8231973424349924e-05, + "loss": 0.9434, + "step": 3191 + }, + { + "epoch": 0.54, + "learning_rate": 1.8228889501022395e-05, + "loss": 0.9479, + "step": 3192 + }, + { + "epoch": 0.54, + "learning_rate": 1.8225803151732236e-05, + "loss": 0.9862, + "step": 3193 + }, + { + "epoch": 0.54, + "learning_rate": 1.822271437738933e-05, + "loss": 0.3556, + "step": 3194 + }, + { + "epoch": 0.54, + "learning_rate": 1.821962317890429e-05, + "loss": 0.9262, + "step": 3195 + }, + { + "epoch": 0.54, + "learning_rate": 1.8216529557188424e-05, + "loss": 0.8225, + "step": 3196 + }, + { + "epoch": 0.54, + "learning_rate": 1.821343351315377e-05, + "loss": 0.9448, + "step": 3197 + }, + { + "epoch": 0.54, + "learning_rate": 1.8210335047713074e-05, + "loss": 0.8588, + "step": 3198 + }, + { + "epoch": 0.54, + "learning_rate": 1.8207234161779793e-05, + "loss": 0.8855, + "step": 3199 + }, + { + "epoch": 0.54, + "learning_rate": 1.82041308562681e-05, + "loss": 0.947, + "step": 3200 + }, + { + "epoch": 0.54, + "learning_rate": 1.820102513209289e-05, + "loss": 0.9701, + "step": 3201 + }, + { + "epoch": 0.54, + "learning_rate": 1.819791699016975e-05, + "loss": 0.9959, + "step": 3202 + }, + { + "epoch": 0.54, + "learning_rate": 1.8194806431415005e-05, + "loss": 0.891, + "step": 3203 + }, + { + "epoch": 0.54, + "learning_rate": 1.8191693456745673e-05, + "loss": 0.972, + "step": 3204 + }, + { + "epoch": 0.54, + "learning_rate": 1.818857806707949e-05, + "loss": 0.9288, + "step": 3205 + }, + { + "epoch": 0.54, + "learning_rate": 1.8185460263334914e-05, + "loss": 0.9651, + "step": 3206 + }, + { + "epoch": 0.54, + "learning_rate": 1.81823400464311e-05, + "loss": 0.911, + "step": 3207 + }, + { + "epoch": 0.54, + "learning_rate": 1.817921741728792e-05, + "loss": 0.9132, + "step": 3208 + }, + { + "epoch": 0.54, + "learning_rate": 1.817609237682596e-05, + "loss": 0.9288, + "step": 3209 + }, + { + "epoch": 0.54, + "learning_rate": 1.817296492596651e-05, + "loss": 0.9253, + "step": 3210 + }, + { + "epoch": 0.54, + "learning_rate": 1.816983506563158e-05, + "loss": 0.9335, + "step": 3211 + }, + { + "epoch": 0.54, + "learning_rate": 1.816670279674389e-05, + "loss": 0.9411, + "step": 3212 + }, + { + "epoch": 0.54, + "learning_rate": 1.8163568120226856e-05, + "loss": 0.9531, + "step": 3213 + }, + { + "epoch": 0.54, + "learning_rate": 1.8160431037004618e-05, + "loss": 0.8717, + "step": 3214 + }, + { + "epoch": 0.54, + "learning_rate": 1.815729154800202e-05, + "loss": 0.9671, + "step": 3215 + }, + { + "epoch": 0.54, + "learning_rate": 1.815414965414462e-05, + "loss": 0.9379, + "step": 3216 + }, + { + "epoch": 0.54, + "learning_rate": 1.8151005356358673e-05, + "loss": 0.9828, + "step": 3217 + }, + { + "epoch": 0.54, + "learning_rate": 1.8147858655571156e-05, + "loss": 0.9244, + "step": 3218 + }, + { + "epoch": 0.54, + "learning_rate": 1.814470955270975e-05, + "loss": 0.9413, + "step": 3219 + }, + { + "epoch": 0.54, + "learning_rate": 1.8141558048702844e-05, + "loss": 0.4005, + "step": 3220 + }, + { + "epoch": 0.54, + "learning_rate": 1.813840414447953e-05, + "loss": 0.927, + "step": 3221 + }, + { + "epoch": 
0.54, + "learning_rate": 1.8135247840969615e-05, + "loss": 0.9265, + "step": 3222 + }, + { + "epoch": 0.54, + "learning_rate": 1.8132089139103612e-05, + "loss": 0.8911, + "step": 3223 + }, + { + "epoch": 0.54, + "learning_rate": 1.8128928039812736e-05, + "loss": 0.3681, + "step": 3224 + }, + { + "epoch": 0.54, + "learning_rate": 1.8125764544028913e-05, + "loss": 0.883, + "step": 3225 + }, + { + "epoch": 0.54, + "learning_rate": 1.8122598652684774e-05, + "loss": 0.8744, + "step": 3226 + }, + { + "epoch": 0.54, + "learning_rate": 1.8119430366713658e-05, + "loss": 0.9763, + "step": 3227 + }, + { + "epoch": 0.54, + "learning_rate": 1.8116259687049612e-05, + "loss": 0.9339, + "step": 3228 + }, + { + "epoch": 0.54, + "learning_rate": 1.8113086614627377e-05, + "loss": 0.9451, + "step": 3229 + }, + { + "epoch": 0.54, + "learning_rate": 1.8109911150382417e-05, + "loss": 0.9649, + "step": 3230 + }, + { + "epoch": 0.54, + "learning_rate": 1.8106733295250885e-05, + "loss": 0.9246, + "step": 3231 + }, + { + "epoch": 0.54, + "learning_rate": 1.8103553050169652e-05, + "loss": 0.9334, + "step": 3232 + }, + { + "epoch": 0.54, + "learning_rate": 1.810037041607628e-05, + "loss": 0.9467, + "step": 3233 + }, + { + "epoch": 0.54, + "learning_rate": 1.809718539390905e-05, + "loss": 0.966, + "step": 3234 + }, + { + "epoch": 0.54, + "learning_rate": 1.8093997984606936e-05, + "loss": 0.9202, + "step": 3235 + }, + { + "epoch": 0.54, + "learning_rate": 1.809080818910962e-05, + "loss": 0.9534, + "step": 3236 + }, + { + "epoch": 0.54, + "learning_rate": 1.8087616008357488e-05, + "loss": 0.9689, + "step": 3237 + }, + { + "epoch": 0.54, + "learning_rate": 1.8084421443291633e-05, + "loss": 0.9143, + "step": 3238 + }, + { + "epoch": 0.54, + "learning_rate": 1.8081224494853834e-05, + "loss": 0.931, + "step": 3239 + }, + { + "epoch": 0.54, + "learning_rate": 1.8078025163986595e-05, + "loss": 0.9295, + "step": 3240 + }, + { + "epoch": 0.54, + "learning_rate": 1.807482345163311e-05, + "loss": 1.0268, + "step": 3241 + }, + { + "epoch": 0.54, + "learning_rate": 1.8071619358737274e-05, + "loss": 0.9188, + "step": 3242 + }, + { + "epoch": 0.54, + "learning_rate": 1.8068412886243692e-05, + "loss": 0.9359, + "step": 3243 + }, + { + "epoch": 0.54, + "learning_rate": 1.806520403509766e-05, + "loss": 0.8977, + "step": 3244 + }, + { + "epoch": 0.54, + "learning_rate": 1.8061992806245186e-05, + "loss": 0.8984, + "step": 3245 + }, + { + "epoch": 0.54, + "learning_rate": 1.805877920063297e-05, + "loss": 0.9241, + "step": 3246 + }, + { + "epoch": 0.54, + "learning_rate": 1.805556321920842e-05, + "loss": 0.9044, + "step": 3247 + }, + { + "epoch": 0.54, + "learning_rate": 1.8052344862919637e-05, + "loss": 0.9095, + "step": 3248 + }, + { + "epoch": 0.54, + "learning_rate": 1.804912413271543e-05, + "loss": 0.949, + "step": 3249 + }, + { + "epoch": 0.54, + "learning_rate": 1.8045901029545304e-05, + "loss": 0.8965, + "step": 3250 + }, + { + "epoch": 0.55, + "learning_rate": 1.8042675554359456e-05, + "loss": 0.907, + "step": 3251 + }, + { + "epoch": 0.55, + "learning_rate": 1.80394477081088e-05, + "loss": 0.9232, + "step": 3252 + }, + { + "epoch": 0.55, + "learning_rate": 1.8036217491744935e-05, + "loss": 0.858, + "step": 3253 + }, + { + "epoch": 0.55, + "learning_rate": 1.803298490622016e-05, + "loss": 0.9244, + "step": 3254 + }, + { + "epoch": 0.55, + "learning_rate": 1.8029749952487474e-05, + "loss": 1.0402, + "step": 3255 + }, + { + "epoch": 0.55, + "learning_rate": 1.8026512631500583e-05, + "loss": 0.9724, + "step": 3256 + }, + { + "epoch": 
0.55, + "learning_rate": 1.8023272944213875e-05, + "loss": 0.928, + "step": 3257 + }, + { + "epoch": 0.55, + "learning_rate": 1.802003089158245e-05, + "loss": 0.9451, + "step": 3258 + }, + { + "epoch": 0.55, + "learning_rate": 1.8016786474562093e-05, + "loss": 0.9084, + "step": 3259 + }, + { + "epoch": 0.55, + "learning_rate": 1.8013539694109293e-05, + "loss": 0.9432, + "step": 3260 + }, + { + "epoch": 0.55, + "learning_rate": 1.8010290551181237e-05, + "loss": 0.9816, + "step": 3261 + }, + { + "epoch": 0.55, + "learning_rate": 1.8007039046735807e-05, + "loss": 0.9397, + "step": 3262 + }, + { + "epoch": 0.55, + "learning_rate": 1.800378518173158e-05, + "loss": 0.9472, + "step": 3263 + }, + { + "epoch": 0.55, + "learning_rate": 1.8000528957127828e-05, + "loss": 0.9552, + "step": 3264 + }, + { + "epoch": 0.55, + "learning_rate": 1.7997270373884523e-05, + "loss": 1.0117, + "step": 3265 + }, + { + "epoch": 0.55, + "learning_rate": 1.7994009432962326e-05, + "loss": 0.9324, + "step": 3266 + }, + { + "epoch": 0.55, + "learning_rate": 1.7990746135322593e-05, + "loss": 0.9229, + "step": 3267 + }, + { + "epoch": 0.55, + "learning_rate": 1.798748048192739e-05, + "loss": 0.8987, + "step": 3268 + }, + { + "epoch": 0.55, + "learning_rate": 1.798421247373945e-05, + "loss": 0.891, + "step": 3269 + }, + { + "epoch": 0.55, + "learning_rate": 1.7980942111722234e-05, + "loss": 0.4003, + "step": 3270 + }, + { + "epoch": 0.55, + "learning_rate": 1.7977669396839866e-05, + "loss": 0.94, + "step": 3271 + }, + { + "epoch": 0.55, + "learning_rate": 1.797439433005718e-05, + "loss": 0.3928, + "step": 3272 + }, + { + "epoch": 0.55, + "learning_rate": 1.7971116912339698e-05, + "loss": 0.9597, + "step": 3273 + }, + { + "epoch": 0.55, + "learning_rate": 1.7967837144653643e-05, + "loss": 0.9384, + "step": 3274 + }, + { + "epoch": 0.55, + "learning_rate": 1.796455502796592e-05, + "loss": 0.9491, + "step": 3275 + }, + { + "epoch": 0.55, + "learning_rate": 1.796127056324413e-05, + "loss": 0.9271, + "step": 3276 + }, + { + "epoch": 0.55, + "learning_rate": 1.795798375145657e-05, + "loss": 0.9599, + "step": 3277 + }, + { + "epoch": 0.55, + "learning_rate": 1.7954694593572225e-05, + "loss": 0.9302, + "step": 3278 + }, + { + "epoch": 0.55, + "learning_rate": 1.795140309056078e-05, + "loss": 0.9182, + "step": 3279 + }, + { + "epoch": 0.55, + "learning_rate": 1.794810924339259e-05, + "loss": 0.9389, + "step": 3280 + }, + { + "epoch": 0.55, + "learning_rate": 1.7944813053038733e-05, + "loss": 0.9993, + "step": 3281 + }, + { + "epoch": 0.55, + "learning_rate": 1.7941514520470944e-05, + "loss": 0.8798, + "step": 3282 + }, + { + "epoch": 0.55, + "learning_rate": 1.793821364666168e-05, + "loss": 0.9094, + "step": 3283 + }, + { + "epoch": 0.55, + "learning_rate": 1.7934910432584057e-05, + "loss": 0.9394, + "step": 3284 + }, + { + "epoch": 0.55, + "learning_rate": 1.793160487921191e-05, + "loss": 0.927, + "step": 3285 + }, + { + "epoch": 0.55, + "learning_rate": 1.792829698751975e-05, + "loss": 0.906, + "step": 3286 + }, + { + "epoch": 0.55, + "learning_rate": 1.7924986758482768e-05, + "loss": 0.9546, + "step": 3287 + }, + { + "epoch": 0.55, + "learning_rate": 1.7921674193076865e-05, + "loss": 0.9154, + "step": 3288 + }, + { + "epoch": 0.55, + "learning_rate": 1.7918359292278614e-05, + "loss": 0.9171, + "step": 3289 + }, + { + "epoch": 0.55, + "learning_rate": 1.7915042057065283e-05, + "loss": 0.9949, + "step": 3290 + }, + { + "epoch": 0.55, + "learning_rate": 1.791172248841483e-05, + "loss": 0.9509, + "step": 3291 + }, + { + "epoch": 
0.55, + "learning_rate": 1.7908400587305896e-05, + "loss": 0.9383, + "step": 3292 + }, + { + "epoch": 0.55, + "learning_rate": 1.790507635471781e-05, + "loss": 0.9283, + "step": 3293 + }, + { + "epoch": 0.55, + "learning_rate": 1.79017497916306e-05, + "loss": 0.9678, + "step": 3294 + }, + { + "epoch": 0.55, + "learning_rate": 1.789842089902496e-05, + "loss": 0.4103, + "step": 3295 + }, + { + "epoch": 0.55, + "learning_rate": 1.7895089677882288e-05, + "loss": 0.8767, + "step": 3296 + }, + { + "epoch": 0.55, + "learning_rate": 1.7891756129184664e-05, + "loss": 0.9398, + "step": 3297 + }, + { + "epoch": 0.55, + "learning_rate": 1.7888420253914854e-05, + "loss": 0.9166, + "step": 3298 + }, + { + "epoch": 0.55, + "learning_rate": 1.78850820530563e-05, + "loss": 0.9058, + "step": 3299 + }, + { + "epoch": 0.55, + "learning_rate": 1.788174152759315e-05, + "loss": 0.9282, + "step": 3300 + }, + { + "epoch": 0.55, + "learning_rate": 1.787839867851022e-05, + "loss": 0.9334, + "step": 3301 + }, + { + "epoch": 0.55, + "learning_rate": 1.7875053506793015e-05, + "loss": 0.9496, + "step": 3302 + }, + { + "epoch": 0.55, + "learning_rate": 1.7871706013427732e-05, + "loss": 0.9312, + "step": 3303 + }, + { + "epoch": 0.55, + "learning_rate": 1.7868356199401242e-05, + "loss": 0.9189, + "step": 3304 + }, + { + "epoch": 0.55, + "learning_rate": 1.7865004065701107e-05, + "loss": 0.9421, + "step": 3305 + }, + { + "epoch": 0.55, + "learning_rate": 1.786164961331557e-05, + "loss": 0.9385, + "step": 3306 + }, + { + "epoch": 0.55, + "learning_rate": 1.7858292843233564e-05, + "loss": 0.8707, + "step": 3307 + }, + { + "epoch": 0.55, + "learning_rate": 1.7854933756444692e-05, + "loss": 0.8497, + "step": 3308 + }, + { + "epoch": 0.55, + "learning_rate": 1.7851572353939253e-05, + "loss": 0.9461, + "step": 3309 + }, + { + "epoch": 0.55, + "learning_rate": 1.784820863670822e-05, + "loss": 0.9536, + "step": 3310 + }, + { + "epoch": 0.56, + "learning_rate": 1.7844842605743256e-05, + "loss": 1.0062, + "step": 3311 + }, + { + "epoch": 0.56, + "learning_rate": 1.78414742620367e-05, + "loss": 0.97, + "step": 3312 + }, + { + "epoch": 0.56, + "learning_rate": 1.783810360658158e-05, + "loss": 0.9199, + "step": 3313 + }, + { + "epoch": 0.56, + "learning_rate": 1.783473064037159e-05, + "loss": 0.9454, + "step": 3314 + }, + { + "epoch": 0.56, + "learning_rate": 1.7831355364401123e-05, + "loss": 0.8879, + "step": 3315 + }, + { + "epoch": 0.56, + "learning_rate": 1.7827977779665245e-05, + "loss": 0.9621, + "step": 3316 + }, + { + "epoch": 0.56, + "learning_rate": 1.7824597887159704e-05, + "loss": 0.9271, + "step": 3317 + }, + { + "epoch": 0.56, + "learning_rate": 1.782121568788092e-05, + "loss": 0.9202, + "step": 3318 + }, + { + "epoch": 0.56, + "learning_rate": 1.781783118282601e-05, + "loss": 0.923, + "step": 3319 + }, + { + "epoch": 0.56, + "learning_rate": 1.781444437299276e-05, + "loss": 0.903, + "step": 3320 + }, + { + "epoch": 0.56, + "learning_rate": 1.7811055259379635e-05, + "loss": 0.9226, + "step": 3321 + }, + { + "epoch": 0.56, + "learning_rate": 1.7807663842985777e-05, + "loss": 0.9806, + "step": 3322 + }, + { + "epoch": 0.56, + "learning_rate": 1.780427012481102e-05, + "loss": 0.9211, + "step": 3323 + }, + { + "epoch": 0.56, + "learning_rate": 1.7800874105855862e-05, + "loss": 0.9632, + "step": 3324 + }, + { + "epoch": 0.56, + "learning_rate": 1.7797475787121485e-05, + "loss": 0.9256, + "step": 3325 + }, + { + "epoch": 0.56, + "learning_rate": 1.7794075169609746e-05, + "loss": 0.9261, + "step": 3326 + }, + { + "epoch": 0.56, 
+ "learning_rate": 1.779067225432319e-05, + "loss": 0.9214, + "step": 3327 + }, + { + "epoch": 0.56, + "learning_rate": 1.7787267042265028e-05, + "loss": 0.9071, + "step": 3328 + }, + { + "epoch": 0.56, + "learning_rate": 1.778385953443915e-05, + "loss": 0.8785, + "step": 3329 + }, + { + "epoch": 0.56, + "learning_rate": 1.778044973185013e-05, + "loss": 0.9364, + "step": 3330 + }, + { + "epoch": 0.56, + "learning_rate": 1.777703763550321e-05, + "loss": 0.9221, + "step": 3331 + }, + { + "epoch": 0.56, + "learning_rate": 1.7773623246404312e-05, + "loss": 0.9186, + "step": 3332 + }, + { + "epoch": 0.56, + "learning_rate": 1.7770206565560034e-05, + "loss": 0.9018, + "step": 3333 + }, + { + "epoch": 0.56, + "learning_rate": 1.7766787593977648e-05, + "loss": 0.9435, + "step": 3334 + }, + { + "epoch": 0.56, + "learning_rate": 1.7763366332665106e-05, + "loss": 0.8644, + "step": 3335 + }, + { + "epoch": 0.56, + "learning_rate": 1.7759942782631027e-05, + "loss": 0.9303, + "step": 3336 + }, + { + "epoch": 0.56, + "learning_rate": 1.7756516944884713e-05, + "loss": 0.9567, + "step": 3337 + }, + { + "epoch": 0.56, + "learning_rate": 1.775308882043613e-05, + "loss": 0.8615, + "step": 3338 + }, + { + "epoch": 0.56, + "learning_rate": 1.7749658410295935e-05, + "loss": 0.9457, + "step": 3339 + }, + { + "epoch": 0.56, + "learning_rate": 1.7746225715475444e-05, + "loss": 0.9244, + "step": 3340 + }, + { + "epoch": 0.56, + "learning_rate": 1.7742790736986652e-05, + "loss": 0.9557, + "step": 3341 + }, + { + "epoch": 0.56, + "learning_rate": 1.7739353475842225e-05, + "loss": 0.9152, + "step": 3342 + }, + { + "epoch": 0.56, + "learning_rate": 1.7735913933055506e-05, + "loss": 0.945, + "step": 3343 + }, + { + "epoch": 0.56, + "learning_rate": 1.7732472109640504e-05, + "loss": 0.8866, + "step": 3344 + }, + { + "epoch": 0.56, + "learning_rate": 1.7729028006611908e-05, + "loss": 0.9368, + "step": 3345 + }, + { + "epoch": 0.56, + "learning_rate": 1.7725581624985073e-05, + "loss": 0.9361, + "step": 3346 + }, + { + "epoch": 0.56, + "learning_rate": 1.7722132965776035e-05, + "loss": 0.9801, + "step": 3347 + }, + { + "epoch": 0.56, + "learning_rate": 1.7718682030001485e-05, + "loss": 0.9397, + "step": 3348 + }, + { + "epoch": 0.56, + "learning_rate": 1.77152288186788e-05, + "loss": 0.9191, + "step": 3349 + }, + { + "epoch": 0.56, + "learning_rate": 1.7711773332826023e-05, + "loss": 0.8549, + "step": 3350 + }, + { + "epoch": 0.56, + "learning_rate": 1.770831557346187e-05, + "loss": 0.8748, + "step": 3351 + }, + { + "epoch": 0.56, + "learning_rate": 1.7704855541605715e-05, + "loss": 0.9398, + "step": 3352 + }, + { + "epoch": 0.56, + "learning_rate": 1.7701393238277626e-05, + "loss": 0.8899, + "step": 3353 + }, + { + "epoch": 0.56, + "learning_rate": 1.7697928664498308e-05, + "loss": 0.9712, + "step": 3354 + }, + { + "epoch": 0.56, + "learning_rate": 1.769446182128917e-05, + "loss": 0.9596, + "step": 3355 + }, + { + "epoch": 0.56, + "learning_rate": 1.769099270967227e-05, + "loss": 0.9079, + "step": 3356 + }, + { + "epoch": 0.56, + "learning_rate": 1.7687521330670333e-05, + "loss": 0.9224, + "step": 3357 + }, + { + "epoch": 0.56, + "learning_rate": 1.768404768530676e-05, + "loss": 0.9388, + "step": 3358 + }, + { + "epoch": 0.56, + "learning_rate": 1.768057177460562e-05, + "loss": 0.9207, + "step": 3359 + }, + { + "epoch": 0.56, + "learning_rate": 1.7677093599591643e-05, + "loss": 0.9219, + "step": 3360 + }, + { + "epoch": 0.56, + "learning_rate": 1.7673613161290237e-05, + "loss": 0.8982, + "step": 3361 + }, + { + "epoch": 
0.56, + "learning_rate": 1.7670130460727465e-05, + "loss": 0.9219, + "step": 3362 + }, + { + "epoch": 0.56, + "learning_rate": 1.7666645498930074e-05, + "loss": 0.8917, + "step": 3363 + }, + { + "epoch": 0.56, + "learning_rate": 1.7663158276925458e-05, + "loss": 0.9593, + "step": 3364 + }, + { + "epoch": 0.56, + "learning_rate": 1.765966879574169e-05, + "loss": 0.426, + "step": 3365 + }, + { + "epoch": 0.56, + "learning_rate": 1.7656177056407508e-05, + "loss": 0.9458, + "step": 3366 + }, + { + "epoch": 0.56, + "learning_rate": 1.7652683059952306e-05, + "loss": 0.936, + "step": 3367 + }, + { + "epoch": 0.56, + "learning_rate": 1.7649186807406153e-05, + "loss": 0.9417, + "step": 3368 + }, + { + "epoch": 0.56, + "learning_rate": 1.7645688299799787e-05, + "loss": 0.9557, + "step": 3369 + }, + { + "epoch": 0.56, + "learning_rate": 1.7642187538164595e-05, + "loss": 0.9066, + "step": 3370 + }, + { + "epoch": 0.57, + "learning_rate": 1.763868452353265e-05, + "loss": 0.9209, + "step": 3371 + }, + { + "epoch": 0.57, + "learning_rate": 1.763517925693667e-05, + "loss": 0.9421, + "step": 3372 + }, + { + "epoch": 0.57, + "learning_rate": 1.7631671739410042e-05, + "loss": 0.9845, + "step": 3373 + }, + { + "epoch": 0.57, + "learning_rate": 1.7628161971986822e-05, + "loss": 0.9913, + "step": 3374 + }, + { + "epoch": 0.57, + "learning_rate": 1.7624649955701726e-05, + "loss": 0.9748, + "step": 3375 + }, + { + "epoch": 0.57, + "learning_rate": 1.7621135691590132e-05, + "loss": 1.022, + "step": 3376 + }, + { + "epoch": 0.57, + "learning_rate": 1.7617619180688087e-05, + "loss": 0.9211, + "step": 3377 + }, + { + "epoch": 0.57, + "learning_rate": 1.7614100424032284e-05, + "loss": 0.8781, + "step": 3378 + }, + { + "epoch": 0.57, + "learning_rate": 1.7610579422660103e-05, + "loss": 0.9361, + "step": 3379 + }, + { + "epoch": 0.57, + "learning_rate": 1.760705617760956e-05, + "loss": 0.9269, + "step": 3380 + }, + { + "epoch": 0.57, + "learning_rate": 1.760353068991935e-05, + "loss": 0.9513, + "step": 3381 + }, + { + "epoch": 0.57, + "learning_rate": 1.7600002960628827e-05, + "loss": 0.9334, + "step": 3382 + }, + { + "epoch": 0.57, + "learning_rate": 1.7596472990777996e-05, + "loss": 0.9387, + "step": 3383 + }, + { + "epoch": 0.57, + "learning_rate": 1.7592940781407533e-05, + "loss": 0.9216, + "step": 3384 + }, + { + "epoch": 0.57, + "learning_rate": 1.758940633355877e-05, + "loss": 0.9122, + "step": 3385 + }, + { + "epoch": 0.57, + "learning_rate": 1.7585869648273697e-05, + "loss": 0.9562, + "step": 3386 + }, + { + "epoch": 0.57, + "learning_rate": 1.7582330726594964e-05, + "loss": 0.9102, + "step": 3387 + }, + { + "epoch": 0.57, + "learning_rate": 1.757878956956589e-05, + "loss": 0.8911, + "step": 3388 + }, + { + "epoch": 0.57, + "learning_rate": 1.757524617823044e-05, + "loss": 0.9631, + "step": 3389 + }, + { + "epoch": 0.57, + "learning_rate": 1.7571700553633236e-05, + "loss": 0.9522, + "step": 3390 + }, + { + "epoch": 0.57, + "learning_rate": 1.756815269681958e-05, + "loss": 0.9294, + "step": 3391 + }, + { + "epoch": 0.57, + "learning_rate": 1.7564602608835407e-05, + "loss": 0.9593, + "step": 3392 + }, + { + "epoch": 0.57, + "learning_rate": 1.7561050290727322e-05, + "loss": 0.938, + "step": 3393 + }, + { + "epoch": 0.57, + "learning_rate": 1.7557495743542586e-05, + "loss": 0.9188, + "step": 3394 + }, + { + "epoch": 0.57, + "learning_rate": 1.7553938968329114e-05, + "loss": 0.9021, + "step": 3395 + }, + { + "epoch": 0.57, + "learning_rate": 1.7550379966135486e-05, + "loss": 0.9704, + "step": 3396 + }, + { + 
"epoch": 0.57, + "learning_rate": 1.754681873801093e-05, + "loss": 0.9323, + "step": 3397 + }, + { + "epoch": 0.57, + "learning_rate": 1.7543255285005335e-05, + "loss": 0.9334, + "step": 3398 + }, + { + "epoch": 0.57, + "learning_rate": 1.753968960816924e-05, + "loss": 1.0382, + "step": 3399 + }, + { + "epoch": 0.57, + "learning_rate": 1.7536121708553845e-05, + "loss": 0.9471, + "step": 3400 + }, + { + "epoch": 0.57, + "learning_rate": 1.7532551587211006e-05, + "loss": 0.8923, + "step": 3401 + }, + { + "epoch": 0.57, + "learning_rate": 1.7528979245193233e-05, + "loss": 0.9571, + "step": 3402 + }, + { + "epoch": 0.57, + "learning_rate": 1.752540468355369e-05, + "loss": 0.9602, + "step": 3403 + }, + { + "epoch": 0.57, + "learning_rate": 1.752182790334619e-05, + "loss": 0.9552, + "step": 3404 + }, + { + "epoch": 0.57, + "learning_rate": 1.7518248905625214e-05, + "loss": 0.924, + "step": 3405 + }, + { + "epoch": 0.57, + "learning_rate": 1.7514667691445873e-05, + "loss": 0.9993, + "step": 3406 + }, + { + "epoch": 0.57, + "learning_rate": 1.7511084261863965e-05, + "loss": 0.4009, + "step": 3407 + }, + { + "epoch": 0.57, + "learning_rate": 1.750749861793591e-05, + "loss": 0.9319, + "step": 3408 + }, + { + "epoch": 0.57, + "learning_rate": 1.7503910760718797e-05, + "loss": 0.9293, + "step": 3409 + }, + { + "epoch": 0.57, + "learning_rate": 1.7500320691270365e-05, + "loss": 0.9957, + "step": 3410 + }, + { + "epoch": 0.57, + "learning_rate": 1.7496728410649e-05, + "loss": 0.8953, + "step": 3411 + }, + { + "epoch": 0.57, + "learning_rate": 1.749313391991375e-05, + "loss": 0.8797, + "step": 3412 + }, + { + "epoch": 0.57, + "learning_rate": 1.74895372201243e-05, + "loss": 0.9476, + "step": 3413 + }, + { + "epoch": 0.57, + "learning_rate": 1.7485938312341003e-05, + "loss": 0.9351, + "step": 3414 + }, + { + "epoch": 0.57, + "learning_rate": 1.7482337197624853e-05, + "loss": 0.9789, + "step": 3415 + }, + { + "epoch": 0.57, + "learning_rate": 1.747873387703749e-05, + "loss": 0.9376, + "step": 3416 + }, + { + "epoch": 0.57, + "learning_rate": 1.7475128351641216e-05, + "loss": 0.9194, + "step": 3417 + }, + { + "epoch": 0.57, + "learning_rate": 1.7471520622498982e-05, + "loss": 0.9389, + "step": 3418 + }, + { + "epoch": 0.57, + "learning_rate": 1.7467910690674372e-05, + "loss": 0.9026, + "step": 3419 + }, + { + "epoch": 0.57, + "learning_rate": 1.7464298557231642e-05, + "loss": 0.9227, + "step": 3420 + }, + { + "epoch": 0.57, + "learning_rate": 1.746068422323568e-05, + "loss": 0.3725, + "step": 3421 + }, + { + "epoch": 0.57, + "learning_rate": 1.7457067689752033e-05, + "loss": 0.99, + "step": 3422 + }, + { + "epoch": 0.57, + "learning_rate": 1.7453448957846896e-05, + "loss": 0.9522, + "step": 3423 + }, + { + "epoch": 0.57, + "learning_rate": 1.7449828028587105e-05, + "loss": 0.9141, + "step": 3424 + }, + { + "epoch": 0.57, + "learning_rate": 1.7446204903040148e-05, + "loss": 0.9128, + "step": 3425 + }, + { + "epoch": 0.57, + "learning_rate": 1.744257958227416e-05, + "loss": 0.9486, + "step": 3426 + }, + { + "epoch": 0.57, + "learning_rate": 1.743895206735792e-05, + "loss": 0.9267, + "step": 3427 + }, + { + "epoch": 0.57, + "learning_rate": 1.7435322359360866e-05, + "loss": 0.8818, + "step": 3428 + }, + { + "epoch": 0.57, + "learning_rate": 1.743169045935307e-05, + "loss": 0.9356, + "step": 3429 + }, + { + "epoch": 0.58, + "learning_rate": 1.7428056368405247e-05, + "loss": 0.9144, + "step": 3430 + }, + { + "epoch": 0.58, + "learning_rate": 1.7424420087588777e-05, + "loss": 0.8787, + "step": 3431 + }, + { + 
"epoch": 0.58, + "learning_rate": 1.7420781617975667e-05, + "loss": 0.9566, + "step": 3432 + }, + { + "epoch": 0.58, + "learning_rate": 1.7417140960638574e-05, + "loss": 0.9871, + "step": 3433 + }, + { + "epoch": 0.58, + "learning_rate": 1.7413498116650806e-05, + "loss": 0.8999, + "step": 3434 + }, + { + "epoch": 0.58, + "learning_rate": 1.7409853087086312e-05, + "loss": 1.0207, + "step": 3435 + }, + { + "epoch": 0.58, + "learning_rate": 1.7406205873019684e-05, + "loss": 0.9479, + "step": 3436 + }, + { + "epoch": 0.58, + "learning_rate": 1.7402556475526155e-05, + "loss": 0.9146, + "step": 3437 + }, + { + "epoch": 0.58, + "learning_rate": 1.739890489568161e-05, + "loss": 0.9576, + "step": 3438 + }, + { + "epoch": 0.58, + "learning_rate": 1.7395251134562566e-05, + "loss": 0.9733, + "step": 3439 + }, + { + "epoch": 0.58, + "learning_rate": 1.7391595193246197e-05, + "loss": 0.9065, + "step": 3440 + }, + { + "epoch": 0.58, + "learning_rate": 1.7387937072810313e-05, + "loss": 0.8829, + "step": 3441 + }, + { + "epoch": 0.58, + "learning_rate": 1.7384276774333363e-05, + "loss": 0.9192, + "step": 3442 + }, + { + "epoch": 0.58, + "learning_rate": 1.7380614298894443e-05, + "loss": 0.8616, + "step": 3443 + }, + { + "epoch": 0.58, + "learning_rate": 1.7376949647573287e-05, + "loss": 0.9838, + "step": 3444 + }, + { + "epoch": 0.58, + "learning_rate": 1.737328282145027e-05, + "loss": 0.9812, + "step": 3445 + }, + { + "epoch": 0.58, + "learning_rate": 1.736961382160642e-05, + "loss": 0.9059, + "step": 3446 + }, + { + "epoch": 0.58, + "learning_rate": 1.736594264912339e-05, + "loss": 0.9639, + "step": 3447 + }, + { + "epoch": 0.58, + "learning_rate": 1.736226930508348e-05, + "loss": 0.9968, + "step": 3448 + }, + { + "epoch": 0.58, + "learning_rate": 1.735859379056963e-05, + "loss": 0.8571, + "step": 3449 + }, + { + "epoch": 0.58, + "learning_rate": 1.7354916106665422e-05, + "loss": 0.9024, + "step": 3450 + }, + { + "epoch": 0.58, + "learning_rate": 1.7351236254455077e-05, + "loss": 0.9467, + "step": 3451 + }, + { + "epoch": 0.58, + "learning_rate": 1.7347554235023447e-05, + "loss": 0.8832, + "step": 3452 + }, + { + "epoch": 0.58, + "learning_rate": 1.734387004945604e-05, + "loss": 0.9777, + "step": 3453 + }, + { + "epoch": 0.58, + "learning_rate": 1.734018369883898e-05, + "loss": 0.9455, + "step": 3454 + }, + { + "epoch": 0.58, + "learning_rate": 1.7336495184259057e-05, + "loss": 0.8873, + "step": 3455 + }, + { + "epoch": 0.58, + "learning_rate": 1.733280450680367e-05, + "loss": 0.9469, + "step": 3456 + }, + { + "epoch": 0.58, + "learning_rate": 1.7329111667560875e-05, + "loss": 0.8685, + "step": 3457 + }, + { + "epoch": 0.58, + "learning_rate": 1.732541666761936e-05, + "loss": 0.9316, + "step": 3458 + }, + { + "epoch": 0.58, + "learning_rate": 1.732171950806845e-05, + "loss": 0.9201, + "step": 3459 + }, + { + "epoch": 0.58, + "learning_rate": 1.7318020189998103e-05, + "loss": 0.8945, + "step": 3460 + }, + { + "epoch": 0.58, + "learning_rate": 1.7314318714498922e-05, + "loss": 0.9633, + "step": 3461 + }, + { + "epoch": 0.58, + "learning_rate": 1.7310615082662133e-05, + "loss": 0.9124, + "step": 3462 + }, + { + "epoch": 0.58, + "learning_rate": 1.730690929557961e-05, + "loss": 0.8791, + "step": 3463 + }, + { + "epoch": 0.58, + "learning_rate": 1.730320135434386e-05, + "loss": 0.9286, + "step": 3464 + }, + { + "epoch": 0.58, + "learning_rate": 1.729949126004802e-05, + "loss": 0.9455, + "step": 3465 + }, + { + "epoch": 0.58, + "learning_rate": 1.7295779013785865e-05, + "loss": 0.8606, + "step": 3466 + }, + { 
+ "epoch": 0.58, + "learning_rate": 1.72920646166518e-05, + "loss": 0.9164, + "step": 3467 + }, + { + "epoch": 0.58, + "learning_rate": 1.7288348069740878e-05, + "loss": 1.006, + "step": 3468 + }, + { + "epoch": 0.58, + "learning_rate": 1.7284629374148764e-05, + "loss": 0.9051, + "step": 3469 + }, + { + "epoch": 0.58, + "learning_rate": 1.728090853097178e-05, + "loss": 0.9331, + "step": 3470 + }, + { + "epoch": 0.58, + "learning_rate": 1.727718554130686e-05, + "loss": 0.9495, + "step": 3471 + }, + { + "epoch": 0.58, + "learning_rate": 1.7273460406251584e-05, + "loss": 0.9061, + "step": 3472 + }, + { + "epoch": 0.58, + "learning_rate": 1.7269733126904162e-05, + "loss": 0.9477, + "step": 3473 + }, + { + "epoch": 0.58, + "learning_rate": 1.7266003704363432e-05, + "loss": 0.9392, + "step": 3474 + }, + { + "epoch": 0.58, + "learning_rate": 1.726227213972887e-05, + "loss": 0.979, + "step": 3475 + }, + { + "epoch": 0.58, + "learning_rate": 1.725853843410058e-05, + "loss": 0.974, + "step": 3476 + }, + { + "epoch": 0.58, + "learning_rate": 1.725480258857929e-05, + "loss": 0.8817, + "step": 3477 + }, + { + "epoch": 0.58, + "learning_rate": 1.7251064604266376e-05, + "loss": 0.9501, + "step": 3478 + }, + { + "epoch": 0.58, + "learning_rate": 1.7247324482263832e-05, + "loss": 0.9327, + "step": 3479 + }, + { + "epoch": 0.58, + "learning_rate": 1.7243582223674286e-05, + "loss": 0.9361, + "step": 3480 + }, + { + "epoch": 0.58, + "learning_rate": 1.723983782960099e-05, + "loss": 0.8766, + "step": 3481 + }, + { + "epoch": 0.58, + "learning_rate": 1.7236091301147834e-05, + "loss": 0.9444, + "step": 3482 + }, + { + "epoch": 0.58, + "learning_rate": 1.7232342639419333e-05, + "loss": 0.9139, + "step": 3483 + }, + { + "epoch": 0.58, + "learning_rate": 1.7228591845520633e-05, + "loss": 0.953, + "step": 3484 + }, + { + "epoch": 0.58, + "learning_rate": 1.7224838920557506e-05, + "loss": 0.8984, + "step": 3485 + }, + { + "epoch": 0.58, + "learning_rate": 1.7221083865636356e-05, + "loss": 0.9619, + "step": 3486 + }, + { + "epoch": 0.58, + "learning_rate": 1.721732668186421e-05, + "loss": 0.9392, + "step": 3487 + }, + { + "epoch": 0.58, + "learning_rate": 1.721356737034872e-05, + "loss": 0.8837, + "step": 3488 + }, + { + "epoch": 0.58, + "learning_rate": 1.720980593219818e-05, + "loss": 0.9298, + "step": 3489 + }, + { + "epoch": 0.59, + "learning_rate": 1.7206042368521493e-05, + "loss": 0.967, + "step": 3490 + }, + { + "epoch": 0.59, + "learning_rate": 1.7202276680428208e-05, + "loss": 0.8831, + "step": 3491 + }, + { + "epoch": 0.59, + "learning_rate": 1.7198508869028475e-05, + "loss": 0.9952, + "step": 3492 + }, + { + "epoch": 0.59, + "learning_rate": 1.7194738935433094e-05, + "loss": 0.8982, + "step": 3493 + }, + { + "epoch": 0.59, + "learning_rate": 1.719096688075348e-05, + "loss": 0.9347, + "step": 3494 + }, + { + "epoch": 0.59, + "learning_rate": 1.718719270610167e-05, + "loss": 0.8697, + "step": 3495 + }, + { + "epoch": 0.59, + "learning_rate": 1.7183416412590334e-05, + "loss": 0.4131, + "step": 3496 + }, + { + "epoch": 0.59, + "learning_rate": 1.717963800133276e-05, + "loss": 0.9085, + "step": 3497 + }, + { + "epoch": 0.59, + "learning_rate": 1.7175857473442864e-05, + "loss": 0.9731, + "step": 3498 + }, + { + "epoch": 0.59, + "learning_rate": 1.7172074830035185e-05, + "loss": 0.8984, + "step": 3499 + }, + { + "epoch": 0.59, + "learning_rate": 1.7168290072224886e-05, + "loss": 0.9444, + "step": 3500 + }, + { + "epoch": 0.59, + "learning_rate": 1.7164503201127753e-05, + "loss": 0.9419, + "step": 3501 + }, + { + 
"epoch": 0.59, + "learning_rate": 1.7160714217860196e-05, + "loss": 0.8905, + "step": 3502 + }, + { + "epoch": 0.59, + "learning_rate": 1.7156923123539245e-05, + "loss": 0.9164, + "step": 3503 + }, + { + "epoch": 0.59, + "learning_rate": 1.715312991928256e-05, + "loss": 0.9216, + "step": 3504 + }, + { + "epoch": 0.59, + "learning_rate": 1.714933460620841e-05, + "loss": 0.9114, + "step": 3505 + }, + { + "epoch": 0.59, + "learning_rate": 1.7145537185435694e-05, + "loss": 0.9522, + "step": 3506 + }, + { + "epoch": 0.59, + "learning_rate": 1.7141737658083936e-05, + "loss": 0.9471, + "step": 3507 + }, + { + "epoch": 0.59, + "learning_rate": 1.713793602527327e-05, + "loss": 0.9175, + "step": 3508 + }, + { + "epoch": 0.59, + "learning_rate": 1.7134132288124464e-05, + "loss": 0.9513, + "step": 3509 + }, + { + "epoch": 0.59, + "learning_rate": 1.7130326447758898e-05, + "loss": 0.9099, + "step": 3510 + }, + { + "epoch": 0.59, + "learning_rate": 1.7126518505298567e-05, + "loss": 0.9289, + "step": 3511 + }, + { + "epoch": 0.59, + "learning_rate": 1.71227084618661e-05, + "loss": 0.8793, + "step": 3512 + }, + { + "epoch": 0.59, + "learning_rate": 1.7118896318584733e-05, + "loss": 0.9352, + "step": 3513 + }, + { + "epoch": 0.59, + "learning_rate": 1.7115082076578327e-05, + "loss": 0.4131, + "step": 3514 + }, + { + "epoch": 0.59, + "learning_rate": 1.711126573697136e-05, + "loss": 0.8913, + "step": 3515 + }, + { + "epoch": 0.59, + "learning_rate": 1.7107447300888932e-05, + "loss": 0.9167, + "step": 3516 + }, + { + "epoch": 0.59, + "learning_rate": 1.710362676945675e-05, + "loss": 0.9246, + "step": 3517 + }, + { + "epoch": 0.59, + "learning_rate": 1.709980414380116e-05, + "loss": 0.9189, + "step": 3518 + }, + { + "epoch": 0.59, + "learning_rate": 1.7095979425049098e-05, + "loss": 0.9525, + "step": 3519 + }, + { + "epoch": 0.59, + "learning_rate": 1.7092152614328135e-05, + "loss": 0.9288, + "step": 3520 + }, + { + "epoch": 0.59, + "learning_rate": 1.708832371276646e-05, + "loss": 0.9096, + "step": 3521 + }, + { + "epoch": 0.59, + "learning_rate": 1.708449272149287e-05, + "loss": 0.8257, + "step": 3522 + }, + { + "epoch": 0.59, + "learning_rate": 1.7080659641636784e-05, + "loss": 0.9487, + "step": 3523 + }, + { + "epoch": 0.59, + "learning_rate": 1.7076824474328226e-05, + "loss": 0.9243, + "step": 3524 + }, + { + "epoch": 0.59, + "learning_rate": 1.707298722069785e-05, + "loss": 0.9071, + "step": 3525 + }, + { + "epoch": 0.59, + "learning_rate": 1.706914788187692e-05, + "loss": 0.943, + "step": 3526 + }, + { + "epoch": 0.59, + "learning_rate": 1.7065306458997305e-05, + "loss": 0.9461, + "step": 3527 + }, + { + "epoch": 0.59, + "learning_rate": 1.7061462953191504e-05, + "loss": 0.9969, + "step": 3528 + }, + { + "epoch": 0.59, + "learning_rate": 1.7057617365592624e-05, + "loss": 0.975, + "step": 3529 + }, + { + "epoch": 0.59, + "learning_rate": 1.705376969733438e-05, + "loss": 0.9573, + "step": 3530 + }, + { + "epoch": 0.59, + "learning_rate": 1.7049919949551103e-05, + "loss": 0.9297, + "step": 3531 + }, + { + "epoch": 0.59, + "learning_rate": 1.704606812337774e-05, + "loss": 0.9691, + "step": 3532 + }, + { + "epoch": 0.59, + "learning_rate": 1.7042214219949854e-05, + "loss": 0.9223, + "step": 3533 + }, + { + "epoch": 0.59, + "learning_rate": 1.7038358240403615e-05, + "loss": 0.9592, + "step": 3534 + }, + { + "epoch": 0.59, + "learning_rate": 1.70345001858758e-05, + "loss": 0.9236, + "step": 3535 + }, + { + "epoch": 0.59, + "learning_rate": 1.7030640057503812e-05, + "loss": 0.9, + "step": 3536 + }, + { + 
"epoch": 0.59, + "learning_rate": 1.7026777856425653e-05, + "loss": 0.9199, + "step": 3537 + }, + { + "epoch": 0.59, + "learning_rate": 1.702291358377994e-05, + "loss": 0.8859, + "step": 3538 + }, + { + "epoch": 0.59, + "learning_rate": 1.7019047240705902e-05, + "loss": 0.8934, + "step": 3539 + }, + { + "epoch": 0.59, + "learning_rate": 1.701517882834337e-05, + "loss": 0.89, + "step": 3540 + }, + { + "epoch": 0.59, + "learning_rate": 1.701130834783281e-05, + "loss": 0.8997, + "step": 3541 + }, + { + "epoch": 0.59, + "learning_rate": 1.7007435800315263e-05, + "loss": 0.9134, + "step": 3542 + }, + { + "epoch": 0.59, + "learning_rate": 1.7003561186932403e-05, + "loss": 0.9172, + "step": 3543 + }, + { + "epoch": 0.59, + "learning_rate": 1.699968450882651e-05, + "loss": 0.3883, + "step": 3544 + }, + { + "epoch": 0.59, + "learning_rate": 1.6995805767140465e-05, + "loss": 0.9399, + "step": 3545 + }, + { + "epoch": 0.59, + "learning_rate": 1.699192496301776e-05, + "loss": 0.9021, + "step": 3546 + }, + { + "epoch": 0.59, + "learning_rate": 1.69880420976025e-05, + "loss": 0.9369, + "step": 3547 + }, + { + "epoch": 0.59, + "learning_rate": 1.6984157172039393e-05, + "loss": 0.9014, + "step": 3548 + }, + { + "epoch": 0.59, + "learning_rate": 1.6980270187473757e-05, + "loss": 0.9021, + "step": 3549 + }, + { + "epoch": 0.6, + "learning_rate": 1.6976381145051513e-05, + "loss": 0.9619, + "step": 3550 + }, + { + "epoch": 0.6, + "learning_rate": 1.697249004591919e-05, + "loss": 0.9242, + "step": 3551 + }, + { + "epoch": 0.6, + "learning_rate": 1.696859689122393e-05, + "loss": 0.9316, + "step": 3552 + }, + { + "epoch": 0.6, + "learning_rate": 1.6964701682113477e-05, + "loss": 0.8988, + "step": 3553 + }, + { + "epoch": 0.6, + "learning_rate": 1.6960804419736172e-05, + "loss": 0.9776, + "step": 3554 + }, + { + "epoch": 0.6, + "learning_rate": 1.695690510524097e-05, + "loss": 0.9294, + "step": 3555 + }, + { + "epoch": 0.6, + "learning_rate": 1.6953003739777438e-05, + "loss": 0.9214, + "step": 3556 + }, + { + "epoch": 0.6, + "learning_rate": 1.6949100324495727e-05, + "loss": 0.9354, + "step": 3557 + }, + { + "epoch": 0.6, + "learning_rate": 1.6945194860546614e-05, + "loss": 0.3753, + "step": 3558 + }, + { + "epoch": 0.6, + "learning_rate": 1.6941287349081466e-05, + "loss": 0.951, + "step": 3559 + }, + { + "epoch": 0.6, + "learning_rate": 1.6937377791252262e-05, + "loss": 0.9457, + "step": 3560 + }, + { + "epoch": 0.6, + "learning_rate": 1.6933466188211575e-05, + "loss": 0.3796, + "step": 3561 + }, + { + "epoch": 0.6, + "learning_rate": 1.6929552541112592e-05, + "loss": 0.951, + "step": 3562 + }, + { + "epoch": 0.6, + "learning_rate": 1.6925636851109095e-05, + "loss": 0.8927, + "step": 3563 + }, + { + "epoch": 0.6, + "learning_rate": 1.692171911935547e-05, + "loss": 0.9184, + "step": 3564 + }, + { + "epoch": 0.6, + "learning_rate": 1.69177993470067e-05, + "loss": 0.919, + "step": 3565 + }, + { + "epoch": 0.6, + "learning_rate": 1.6913877535218386e-05, + "loss": 0.8708, + "step": 3566 + }, + { + "epoch": 0.6, + "learning_rate": 1.6909953685146713e-05, + "loss": 0.915, + "step": 3567 + }, + { + "epoch": 0.6, + "learning_rate": 1.6906027797948473e-05, + "loss": 0.9305, + "step": 3568 + }, + { + "epoch": 0.6, + "learning_rate": 1.6902099874781058e-05, + "loss": 0.3982, + "step": 3569 + }, + { + "epoch": 0.6, + "learning_rate": 1.689816991680246e-05, + "loss": 0.8963, + "step": 3570 + }, + { + "epoch": 0.6, + "learning_rate": 1.689423792517128e-05, + "loss": 0.9583, + "step": 3571 + }, + { + "epoch": 0.6, + 
"learning_rate": 1.6890303901046695e-05, + "loss": 0.9505, + "step": 3572 + }, + { + "epoch": 0.6, + "learning_rate": 1.6886367845588507e-05, + "loss": 0.9181, + "step": 3573 + }, + { + "epoch": 0.6, + "learning_rate": 1.688242975995711e-05, + "loss": 0.9161, + "step": 3574 + }, + { + "epoch": 0.6, + "learning_rate": 1.687848964531348e-05, + "loss": 1.0035, + "step": 3575 + }, + { + "epoch": 0.6, + "learning_rate": 1.6874547502819213e-05, + "loss": 0.8926, + "step": 3576 + }, + { + "epoch": 0.6, + "learning_rate": 1.6870603333636495e-05, + "loss": 1.0284, + "step": 3577 + }, + { + "epoch": 0.6, + "learning_rate": 1.6866657138928106e-05, + "loss": 0.9066, + "step": 3578 + }, + { + "epoch": 0.6, + "learning_rate": 1.6862708919857417e-05, + "loss": 0.8897, + "step": 3579 + }, + { + "epoch": 0.6, + "learning_rate": 1.685875867758842e-05, + "loss": 0.9589, + "step": 3580 + }, + { + "epoch": 0.6, + "learning_rate": 1.6854806413285674e-05, + "loss": 0.8879, + "step": 3581 + }, + { + "epoch": 0.6, + "learning_rate": 1.6850852128114357e-05, + "loss": 0.9233, + "step": 3582 + }, + { + "epoch": 0.6, + "learning_rate": 1.6846895823240228e-05, + "loss": 0.898, + "step": 3583 + }, + { + "epoch": 0.6, + "learning_rate": 1.6842937499829652e-05, + "loss": 0.9547, + "step": 3584 + }, + { + "epoch": 0.6, + "learning_rate": 1.6838977159049575e-05, + "loss": 0.9429, + "step": 3585 + }, + { + "epoch": 0.6, + "learning_rate": 1.683501480206756e-05, + "loss": 0.9378, + "step": 3586 + }, + { + "epoch": 0.6, + "learning_rate": 1.683105043005174e-05, + "loss": 0.4002, + "step": 3587 + }, + { + "epoch": 0.6, + "learning_rate": 1.6827084044170857e-05, + "loss": 0.8459, + "step": 3588 + }, + { + "epoch": 0.6, + "learning_rate": 1.6823115645594244e-05, + "loss": 0.9679, + "step": 3589 + }, + { + "epoch": 0.6, + "learning_rate": 1.681914523549183e-05, + "loss": 0.9499, + "step": 3590 + }, + { + "epoch": 0.6, + "learning_rate": 1.6815172815034128e-05, + "loss": 0.9532, + "step": 3591 + }, + { + "epoch": 0.6, + "learning_rate": 1.6811198385392246e-05, + "loss": 0.9415, + "step": 3592 + }, + { + "epoch": 0.6, + "learning_rate": 1.6807221947737895e-05, + "loss": 0.9216, + "step": 3593 + }, + { + "epoch": 0.6, + "learning_rate": 1.6803243503243368e-05, + "loss": 0.8614, + "step": 3594 + }, + { + "epoch": 0.6, + "learning_rate": 1.6799263053081548e-05, + "loss": 0.9497, + "step": 3595 + }, + { + "epoch": 0.6, + "learning_rate": 1.6795280598425918e-05, + "loss": 0.9229, + "step": 3596 + }, + { + "epoch": 0.6, + "learning_rate": 1.6791296140450547e-05, + "loss": 0.965, + "step": 3597 + }, + { + "epoch": 0.6, + "learning_rate": 1.6787309680330093e-05, + "loss": 0.925, + "step": 3598 + }, + { + "epoch": 0.6, + "learning_rate": 1.6783321219239808e-05, + "loss": 0.9822, + "step": 3599 + }, + { + "epoch": 0.6, + "learning_rate": 1.677933075835553e-05, + "loss": 0.916, + "step": 3600 + }, + { + "epoch": 0.6, + "learning_rate": 1.6775338298853687e-05, + "loss": 0.9624, + "step": 3601 + }, + { + "epoch": 0.6, + "learning_rate": 1.6771343841911302e-05, + "loss": 0.9121, + "step": 3602 + }, + { + "epoch": 0.6, + "learning_rate": 1.676734738870598e-05, + "loss": 0.9589, + "step": 3603 + }, + { + "epoch": 0.6, + "learning_rate": 1.676334894041592e-05, + "loss": 0.8506, + "step": 3604 + }, + { + "epoch": 0.6, + "learning_rate": 1.67593484982199e-05, + "loss": 0.9902, + "step": 3605 + }, + { + "epoch": 0.6, + "learning_rate": 1.6755346063297303e-05, + "loss": 0.9763, + "step": 3606 + }, + { + "epoch": 0.6, + "learning_rate": 
1.6751341636828076e-05, + "loss": 0.8755, + "step": 3607 + }, + { + "epoch": 0.6, + "learning_rate": 1.6747335219992777e-05, + "loss": 0.9356, + "step": 3608 + }, + { + "epoch": 0.61, + "learning_rate": 1.674332681397253e-05, + "loss": 0.9117, + "step": 3609 + }, + { + "epoch": 0.61, + "learning_rate": 1.673931641994906e-05, + "loss": 0.9541, + "step": 3610 + }, + { + "epoch": 0.61, + "learning_rate": 1.6735304039104675e-05, + "loss": 0.4044, + "step": 3611 + }, + { + "epoch": 0.61, + "learning_rate": 1.6731289672622263e-05, + "loss": 0.9593, + "step": 3612 + }, + { + "epoch": 0.61, + "learning_rate": 1.6727273321685303e-05, + "loss": 0.9471, + "step": 3613 + }, + { + "epoch": 0.61, + "learning_rate": 1.6723254987477858e-05, + "loss": 0.934, + "step": 3614 + }, + { + "epoch": 0.61, + "learning_rate": 1.671923467118457e-05, + "loss": 0.9127, + "step": 3615 + }, + { + "epoch": 0.61, + "learning_rate": 1.6715212373990676e-05, + "loss": 0.8768, + "step": 3616 + }, + { + "epoch": 0.61, + "learning_rate": 1.6711188097081987e-05, + "loss": 0.3991, + "step": 3617 + }, + { + "epoch": 0.61, + "learning_rate": 1.6707161841644908e-05, + "loss": 0.9134, + "step": 3618 + }, + { + "epoch": 0.61, + "learning_rate": 1.6703133608866415e-05, + "loss": 0.9612, + "step": 3619 + }, + { + "epoch": 0.61, + "learning_rate": 1.6699103399934076e-05, + "loss": 0.9528, + "step": 3620 + }, + { + "epoch": 0.61, + "learning_rate": 1.6695071216036037e-05, + "loss": 0.9187, + "step": 3621 + }, + { + "epoch": 0.61, + "learning_rate": 1.6691037058361032e-05, + "loss": 0.9754, + "step": 3622 + }, + { + "epoch": 0.61, + "learning_rate": 1.6687000928098366e-05, + "loss": 0.9516, + "step": 3623 + }, + { + "epoch": 0.61, + "learning_rate": 1.668296282643794e-05, + "loss": 0.9731, + "step": 3624 + }, + { + "epoch": 0.61, + "learning_rate": 1.667892275457022e-05, + "loss": 0.9354, + "step": 3625 + }, + { + "epoch": 0.61, + "learning_rate": 1.667488071368627e-05, + "loss": 0.9931, + "step": 3626 + }, + { + "epoch": 0.61, + "learning_rate": 1.667083670497772e-05, + "loss": 0.9504, + "step": 3627 + }, + { + "epoch": 0.61, + "learning_rate": 1.666679072963679e-05, + "loss": 0.9312, + "step": 3628 + }, + { + "epoch": 0.61, + "learning_rate": 1.6662742788856275e-05, + "loss": 0.9221, + "step": 3629 + }, + { + "epoch": 0.61, + "learning_rate": 1.6658692883829548e-05, + "loss": 0.9875, + "step": 3630 + }, + { + "epoch": 0.61, + "learning_rate": 1.6654641015750564e-05, + "loss": 0.9006, + "step": 3631 + }, + { + "epoch": 0.61, + "learning_rate": 1.665058718581386e-05, + "loss": 0.9659, + "step": 3632 + }, + { + "epoch": 0.61, + "learning_rate": 1.6646531395214537e-05, + "loss": 1.0047, + "step": 3633 + }, + { + "epoch": 0.61, + "learning_rate": 1.6642473645148297e-05, + "loss": 0.9258, + "step": 3634 + }, + { + "epoch": 0.61, + "learning_rate": 1.6638413936811398e-05, + "loss": 0.9795, + "step": 3635 + }, + { + "epoch": 0.61, + "learning_rate": 1.6634352271400693e-05, + "loss": 0.9789, + "step": 3636 + }, + { + "epoch": 0.61, + "learning_rate": 1.66302886501136e-05, + "loss": 0.3833, + "step": 3637 + }, + { + "epoch": 0.61, + "learning_rate": 1.6626223074148105e-05, + "loss": 0.9269, + "step": 3638 + }, + { + "epoch": 0.61, + "learning_rate": 1.6622155544702804e-05, + "loss": 0.9533, + "step": 3639 + }, + { + "epoch": 0.61, + "learning_rate": 1.661808606297683e-05, + "loss": 0.9167, + "step": 3640 + }, + { + "epoch": 0.61, + "learning_rate": 1.6614014630169916e-05, + "loss": 0.9612, + "step": 3641 + }, + { + "epoch": 0.61, + 
"learning_rate": 1.660994124748236e-05, + "loss": 0.9546, + "step": 3642 + }, + { + "epoch": 0.61, + "learning_rate": 1.6605865916115044e-05, + "loss": 0.9084, + "step": 3643 + }, + { + "epoch": 0.61, + "learning_rate": 1.660178863726941e-05, + "loss": 1.0043, + "step": 3644 + }, + { + "epoch": 0.61, + "learning_rate": 1.659770941214749e-05, + "loss": 0.9405, + "step": 3645 + }, + { + "epoch": 0.61, + "learning_rate": 1.6593628241951875e-05, + "loss": 0.8695, + "step": 3646 + }, + { + "epoch": 0.61, + "learning_rate": 1.658954512788574e-05, + "loss": 0.9656, + "step": 3647 + }, + { + "epoch": 0.61, + "learning_rate": 1.658546007115283e-05, + "loss": 0.9533, + "step": 3648 + }, + { + "epoch": 0.61, + "learning_rate": 1.658137307295746e-05, + "loss": 0.9096, + "step": 3649 + }, + { + "epoch": 0.61, + "learning_rate": 1.6577284134504527e-05, + "loss": 0.8939, + "step": 3650 + }, + { + "epoch": 0.61, + "learning_rate": 1.6573193256999486e-05, + "loss": 0.9057, + "step": 3651 + }, + { + "epoch": 0.61, + "learning_rate": 1.6569100441648373e-05, + "loss": 0.9167, + "step": 3652 + }, + { + "epoch": 0.61, + "learning_rate": 1.6565005689657792e-05, + "loss": 0.9336, + "step": 3653 + }, + { + "epoch": 0.61, + "learning_rate": 1.6560909002234917e-05, + "loss": 1.0051, + "step": 3654 + }, + { + "epoch": 0.61, + "learning_rate": 1.6556810380587497e-05, + "loss": 0.99, + "step": 3655 + }, + { + "epoch": 0.61, + "learning_rate": 1.6552709825923846e-05, + "loss": 0.9451, + "step": 3656 + }, + { + "epoch": 0.61, + "learning_rate": 1.6548607339452853e-05, + "loss": 0.3751, + "step": 3657 + }, + { + "epoch": 0.61, + "learning_rate": 1.654450292238397e-05, + "loss": 0.8701, + "step": 3658 + }, + { + "epoch": 0.61, + "learning_rate": 1.654039657592723e-05, + "loss": 0.9577, + "step": 3659 + }, + { + "epoch": 0.61, + "learning_rate": 1.6536288301293218e-05, + "loss": 0.9822, + "step": 3660 + }, + { + "epoch": 0.61, + "learning_rate": 1.65321780996931e-05, + "loss": 0.9662, + "step": 3661 + }, + { + "epoch": 0.61, + "learning_rate": 1.6528065972338607e-05, + "loss": 0.936, + "step": 3662 + }, + { + "epoch": 0.61, + "learning_rate": 1.6523951920442032e-05, + "loss": 0.9469, + "step": 3663 + }, + { + "epoch": 0.61, + "learning_rate": 1.6519835945216253e-05, + "loss": 0.9173, + "step": 3664 + }, + { + "epoch": 0.61, + "learning_rate": 1.651571804787469e-05, + "loss": 1.0315, + "step": 3665 + }, + { + "epoch": 0.61, + "learning_rate": 1.6511598229631344e-05, + "loss": 0.9753, + "step": 3666 + }, + { + "epoch": 0.61, + "learning_rate": 1.6507476491700788e-05, + "loss": 0.9429, + "step": 3667 + }, + { + "epoch": 0.61, + "learning_rate": 1.6503352835298147e-05, + "loss": 0.9285, + "step": 3668 + }, + { + "epoch": 0.62, + "learning_rate": 1.6499227261639116e-05, + "loss": 0.9537, + "step": 3669 + }, + { + "epoch": 0.62, + "learning_rate": 1.649509977193996e-05, + "loss": 0.9249, + "step": 3670 + }, + { + "epoch": 0.62, + "learning_rate": 1.6490970367417515e-05, + "loss": 0.399, + "step": 3671 + }, + { + "epoch": 0.62, + "learning_rate": 1.648683904928916e-05, + "loss": 1.0453, + "step": 3672 + }, + { + "epoch": 0.62, + "learning_rate": 1.6482705818772853e-05, + "loss": 0.9539, + "step": 3673 + }, + { + "epoch": 0.62, + "learning_rate": 1.647857067708712e-05, + "loss": 0.9798, + "step": 3674 + }, + { + "epoch": 0.62, + "learning_rate": 1.6474433625451035e-05, + "loss": 0.9178, + "step": 3675 + }, + { + "epoch": 0.62, + "learning_rate": 1.6470294665084254e-05, + "loss": 0.9974, + "step": 3676 + }, + { + "epoch": 0.62, + 
"learning_rate": 1.6466153797206973e-05, + "loss": 0.9441, + "step": 3677 + }, + { + "epoch": 0.62, + "learning_rate": 1.6462011023039977e-05, + "loss": 0.9371, + "step": 3678 + }, + { + "epoch": 0.62, + "learning_rate": 1.6457866343804592e-05, + "loss": 0.9759, + "step": 3679 + }, + { + "epoch": 0.62, + "learning_rate": 1.645371976072271e-05, + "loss": 0.8893, + "step": 3680 + }, + { + "epoch": 0.62, + "learning_rate": 1.6449571275016795e-05, + "loss": 0.8837, + "step": 3681 + }, + { + "epoch": 0.62, + "learning_rate": 1.6445420887909858e-05, + "loss": 0.9153, + "step": 3682 + }, + { + "epoch": 0.62, + "learning_rate": 1.6441268600625476e-05, + "loss": 0.9171, + "step": 3683 + }, + { + "epoch": 0.62, + "learning_rate": 1.643711441438779e-05, + "loss": 0.9415, + "step": 3684 + }, + { + "epoch": 0.62, + "learning_rate": 1.6432958330421497e-05, + "loss": 0.9559, + "step": 3685 + }, + { + "epoch": 0.62, + "learning_rate": 1.6428800349951853e-05, + "loss": 0.8692, + "step": 3686 + }, + { + "epoch": 0.62, + "learning_rate": 1.6424640474204675e-05, + "loss": 0.9043, + "step": 3687 + }, + { + "epoch": 0.62, + "learning_rate": 1.6420478704406337e-05, + "loss": 0.8358, + "step": 3688 + }, + { + "epoch": 0.62, + "learning_rate": 1.641631504178377e-05, + "loss": 0.879, + "step": 3689 + }, + { + "epoch": 0.62, + "learning_rate": 1.6412149487564473e-05, + "loss": 0.9829, + "step": 3690 + }, + { + "epoch": 0.62, + "learning_rate": 1.6407982042976483e-05, + "loss": 0.9904, + "step": 3691 + }, + { + "epoch": 0.62, + "learning_rate": 1.640381270924842e-05, + "loss": 0.9208, + "step": 3692 + }, + { + "epoch": 0.62, + "learning_rate": 1.639964148760943e-05, + "loss": 0.9325, + "step": 3693 + }, + { + "epoch": 0.62, + "learning_rate": 1.6395468379289253e-05, + "loss": 0.9228, + "step": 3694 + }, + { + "epoch": 0.62, + "learning_rate": 1.6391293385518146e-05, + "loss": 0.8298, + "step": 3695 + }, + { + "epoch": 0.62, + "learning_rate": 1.6387116507526958e-05, + "loss": 0.8562, + "step": 3696 + }, + { + "epoch": 0.62, + "learning_rate": 1.6382937746547064e-05, + "loss": 0.9471, + "step": 3697 + }, + { + "epoch": 0.62, + "learning_rate": 1.637875710381041e-05, + "loss": 0.8117, + "step": 3698 + }, + { + "epoch": 0.62, + "learning_rate": 1.637457458054949e-05, + "loss": 0.9826, + "step": 3699 + }, + { + "epoch": 0.62, + "learning_rate": 1.637039017799736e-05, + "loss": 0.9711, + "step": 3700 + }, + { + "epoch": 0.62, + "learning_rate": 1.636620389738763e-05, + "loss": 0.9426, + "step": 3701 + }, + { + "epoch": 0.62, + "learning_rate": 1.6362015739954452e-05, + "loss": 0.9077, + "step": 3702 + }, + { + "epoch": 0.62, + "learning_rate": 1.635782570693254e-05, + "loss": 0.8598, + "step": 3703 + }, + { + "epoch": 0.62, + "learning_rate": 1.6353633799557153e-05, + "loss": 0.9995, + "step": 3704 + }, + { + "epoch": 0.62, + "learning_rate": 1.6349440019064126e-05, + "loss": 0.4036, + "step": 3705 + }, + { + "epoch": 0.62, + "learning_rate": 1.6345244366689813e-05, + "loss": 0.9843, + "step": 3706 + }, + { + "epoch": 0.62, + "learning_rate": 1.6341046843671145e-05, + "loss": 0.8571, + "step": 3707 + }, + { + "epoch": 0.62, + "learning_rate": 1.6336847451245592e-05, + "loss": 0.9359, + "step": 3708 + }, + { + "epoch": 0.62, + "learning_rate": 1.633264619065118e-05, + "loss": 0.3881, + "step": 3709 + }, + { + "epoch": 0.62, + "learning_rate": 1.6328443063126488e-05, + "loss": 0.9395, + "step": 3710 + }, + { + "epoch": 0.62, + "learning_rate": 1.6324238069910633e-05, + "loss": 0.934, + "step": 3711 + }, + { + "epoch": 0.62, 
+ "learning_rate": 1.63200312122433e-05, + "loss": 0.9499, + "step": 3712 + }, + { + "epoch": 0.62, + "learning_rate": 1.6315822491364708e-05, + "loss": 0.9161, + "step": 3713 + }, + { + "epoch": 0.62, + "learning_rate": 1.6311611908515635e-05, + "loss": 0.9381, + "step": 3714 + }, + { + "epoch": 0.62, + "learning_rate": 1.6307399464937404e-05, + "loss": 0.8456, + "step": 3715 + }, + { + "epoch": 0.62, + "learning_rate": 1.6303185161871895e-05, + "loss": 0.8992, + "step": 3716 + }, + { + "epoch": 0.62, + "learning_rate": 1.6298969000561515e-05, + "loss": 0.9738, + "step": 3717 + }, + { + "epoch": 0.62, + "learning_rate": 1.629475098224924e-05, + "loss": 0.9451, + "step": 3718 + }, + { + "epoch": 0.62, + "learning_rate": 1.6290531108178587e-05, + "loss": 0.8864, + "step": 3719 + }, + { + "epoch": 0.62, + "learning_rate": 1.6286309379593616e-05, + "loss": 0.9344, + "step": 3720 + }, + { + "epoch": 0.62, + "learning_rate": 1.6282085797738937e-05, + "loss": 0.9893, + "step": 3721 + }, + { + "epoch": 0.62, + "learning_rate": 1.627786036385971e-05, + "loss": 0.9539, + "step": 3722 + }, + { + "epoch": 0.62, + "learning_rate": 1.6273633079201626e-05, + "loss": 0.9024, + "step": 3723 + }, + { + "epoch": 0.62, + "learning_rate": 1.6269403945010948e-05, + "loss": 0.8786, + "step": 3724 + }, + { + "epoch": 0.62, + "learning_rate": 1.626517296253446e-05, + "loss": 0.9783, + "step": 3725 + }, + { + "epoch": 0.62, + "learning_rate": 1.62609401330195e-05, + "loss": 0.9272, + "step": 3726 + }, + { + "epoch": 0.62, + "learning_rate": 1.6256705457713952e-05, + "loss": 0.8455, + "step": 3727 + }, + { + "epoch": 0.62, + "learning_rate": 1.6252468937866243e-05, + "loss": 0.9727, + "step": 3728 + }, + { + "epoch": 0.63, + "learning_rate": 1.624823057472534e-05, + "loss": 0.9355, + "step": 3729 + }, + { + "epoch": 0.63, + "learning_rate": 1.6243990369540764e-05, + "loss": 0.9111, + "step": 3730 + }, + { + "epoch": 0.63, + "learning_rate": 1.6239748323562562e-05, + "loss": 0.9031, + "step": 3731 + }, + { + "epoch": 0.63, + "learning_rate": 1.6235504438041342e-05, + "loss": 0.934, + "step": 3732 + }, + { + "epoch": 0.63, + "learning_rate": 1.623125871422824e-05, + "loss": 0.971, + "step": 3733 + }, + { + "epoch": 0.63, + "learning_rate": 1.6227011153374945e-05, + "loss": 0.9344, + "step": 3734 + }, + { + "epoch": 0.63, + "learning_rate": 1.622276175673368e-05, + "loss": 0.908, + "step": 3735 + }, + { + "epoch": 0.63, + "learning_rate": 1.6218510525557206e-05, + "loss": 0.9284, + "step": 3736 + }, + { + "epoch": 0.63, + "learning_rate": 1.621425746109884e-05, + "loss": 0.8922, + "step": 3737 + }, + { + "epoch": 0.63, + "learning_rate": 1.6210002564612425e-05, + "loss": 0.9369, + "step": 3738 + }, + { + "epoch": 0.63, + "learning_rate": 1.6205745837352346e-05, + "loss": 0.9265, + "step": 3739 + }, + { + "epoch": 0.63, + "learning_rate": 1.6201487280573536e-05, + "loss": 0.9418, + "step": 3740 + }, + { + "epoch": 0.63, + "learning_rate": 1.619722689553146e-05, + "loss": 0.924, + "step": 3741 + }, + { + "epoch": 0.63, + "learning_rate": 1.6192964683482127e-05, + "loss": 0.9582, + "step": 3742 + }, + { + "epoch": 0.63, + "learning_rate": 1.6188700645682075e-05, + "loss": 0.8807, + "step": 3743 + }, + { + "epoch": 0.63, + "learning_rate": 1.618443478338839e-05, + "loss": 0.9822, + "step": 3744 + }, + { + "epoch": 0.63, + "learning_rate": 1.6180167097858697e-05, + "loss": 0.8762, + "step": 3745 + }, + { + "epoch": 0.63, + "learning_rate": 1.6175897590351146e-05, + "loss": 0.3972, + "step": 3746 + }, + { + "epoch": 0.63, + 
"learning_rate": 1.617162626212444e-05, + "loss": 0.8786, + "step": 3747 + }, + { + "epoch": 0.63, + "learning_rate": 1.616735311443781e-05, + "loss": 0.8856, + "step": 3748 + }, + { + "epoch": 0.63, + "learning_rate": 1.616307814855102e-05, + "loss": 0.9055, + "step": 3749 + }, + { + "epoch": 0.63, + "learning_rate": 1.6158801365724376e-05, + "loss": 0.9103, + "step": 3750 + }, + { + "epoch": 0.63, + "learning_rate": 1.6154522767218726e-05, + "loss": 0.9748, + "step": 3751 + }, + { + "epoch": 0.63, + "learning_rate": 1.6150242354295435e-05, + "loss": 0.8972, + "step": 3752 + }, + { + "epoch": 0.63, + "learning_rate": 1.614596012821642e-05, + "loss": 0.9068, + "step": 3753 + }, + { + "epoch": 0.63, + "learning_rate": 1.614167609024412e-05, + "loss": 0.9479, + "step": 3754 + }, + { + "epoch": 0.63, + "learning_rate": 1.613739024164152e-05, + "loss": 0.9811, + "step": 3755 + }, + { + "epoch": 0.63, + "learning_rate": 1.613310258367213e-05, + "loss": 0.9103, + "step": 3756 + }, + { + "epoch": 0.63, + "learning_rate": 1.61288131176e-05, + "loss": 0.992, + "step": 3757 + }, + { + "epoch": 0.63, + "learning_rate": 1.6124521844689707e-05, + "loss": 0.4026, + "step": 3758 + }, + { + "epoch": 0.63, + "learning_rate": 1.612022876620636e-05, + "loss": 0.9441, + "step": 3759 + }, + { + "epoch": 0.63, + "learning_rate": 1.6115933883415607e-05, + "loss": 0.3549, + "step": 3760 + }, + { + "epoch": 0.63, + "learning_rate": 1.6111637197583623e-05, + "loss": 0.9483, + "step": 3761 + }, + { + "epoch": 0.63, + "learning_rate": 1.610733870997712e-05, + "loss": 0.3973, + "step": 3762 + }, + { + "epoch": 0.63, + "learning_rate": 1.6103038421863332e-05, + "loss": 0.9457, + "step": 3763 + }, + { + "epoch": 0.63, + "learning_rate": 1.609873633451003e-05, + "loss": 0.8501, + "step": 3764 + }, + { + "epoch": 0.63, + "learning_rate": 1.6094432449185513e-05, + "loss": 0.8994, + "step": 3765 + }, + { + "epoch": 0.63, + "learning_rate": 1.6090126767158616e-05, + "loss": 0.8855, + "step": 3766 + }, + { + "epoch": 0.63, + "learning_rate": 1.6085819289698695e-05, + "loss": 0.913, + "step": 3767 + }, + { + "epoch": 0.63, + "learning_rate": 1.6081510018075638e-05, + "loss": 0.8493, + "step": 3768 + }, + { + "epoch": 0.63, + "learning_rate": 1.607719895355987e-05, + "loss": 0.978, + "step": 3769 + }, + { + "epoch": 0.63, + "learning_rate": 1.6072886097422333e-05, + "loss": 0.9024, + "step": 3770 + }, + { + "epoch": 0.63, + "learning_rate": 1.60685714509345e-05, + "loss": 0.9042, + "step": 3771 + }, + { + "epoch": 0.63, + "learning_rate": 1.606425501536838e-05, + "loss": 0.8945, + "step": 3772 + }, + { + "epoch": 0.63, + "learning_rate": 1.60599367919965e-05, + "loss": 0.8866, + "step": 3773 + }, + { + "epoch": 0.63, + "learning_rate": 1.6055616782091917e-05, + "loss": 0.9226, + "step": 3774 + }, + { + "epoch": 0.63, + "learning_rate": 1.6051294986928218e-05, + "loss": 0.9626, + "step": 3775 + }, + { + "epoch": 0.63, + "learning_rate": 1.6046971407779507e-05, + "loss": 0.9291, + "step": 3776 + }, + { + "epoch": 0.63, + "learning_rate": 1.604264604592042e-05, + "loss": 0.8392, + "step": 3777 + }, + { + "epoch": 0.63, + "learning_rate": 1.603831890262613e-05, + "loss": 0.9341, + "step": 3778 + }, + { + "epoch": 0.63, + "learning_rate": 1.603398997917232e-05, + "loss": 0.9412, + "step": 3779 + }, + { + "epoch": 0.63, + "learning_rate": 1.6029659276835193e-05, + "loss": 0.9978, + "step": 3780 + }, + { + "epoch": 0.63, + "learning_rate": 1.6025326796891492e-05, + "loss": 0.9561, + "step": 3781 + }, + { + "epoch": 0.63, + 
"learning_rate": 1.6020992540618476e-05, + "loss": 0.9709, + "step": 3782 + }, + { + "epoch": 0.63, + "learning_rate": 1.6016656509293933e-05, + "loss": 0.9295, + "step": 3783 + }, + { + "epoch": 0.63, + "learning_rate": 1.6012318704196164e-05, + "loss": 0.9483, + "step": 3784 + }, + { + "epoch": 0.63, + "learning_rate": 1.6007979126604005e-05, + "loss": 0.9806, + "step": 3785 + }, + { + "epoch": 0.63, + "learning_rate": 1.6003637777796804e-05, + "loss": 0.9104, + "step": 3786 + }, + { + "epoch": 0.63, + "learning_rate": 1.599929465905444e-05, + "loss": 0.9102, + "step": 3787 + }, + { + "epoch": 0.64, + "learning_rate": 1.5994949771657307e-05, + "loss": 0.9966, + "step": 3788 + }, + { + "epoch": 0.64, + "learning_rate": 1.5990603116886326e-05, + "loss": 1.002, + "step": 3789 + }, + { + "epoch": 0.64, + "learning_rate": 1.5986254696022935e-05, + "loss": 0.95, + "step": 3790 + }, + { + "epoch": 0.64, + "learning_rate": 1.5981904510349092e-05, + "loss": 0.981, + "step": 3791 + }, + { + "epoch": 0.64, + "learning_rate": 1.5977552561147276e-05, + "loss": 0.8768, + "step": 3792 + }, + { + "epoch": 0.64, + "learning_rate": 1.5973198849700494e-05, + "loss": 0.8693, + "step": 3793 + }, + { + "epoch": 0.64, + "learning_rate": 1.596884337729226e-05, + "loss": 0.8347, + "step": 3794 + }, + { + "epoch": 0.64, + "learning_rate": 1.5964486145206612e-05, + "loss": 0.3973, + "step": 3795 + }, + { + "epoch": 0.64, + "learning_rate": 1.5960127154728112e-05, + "loss": 0.8741, + "step": 3796 + }, + { + "epoch": 0.64, + "learning_rate": 1.5955766407141834e-05, + "loss": 0.9317, + "step": 3797 + }, + { + "epoch": 0.64, + "learning_rate": 1.5951403903733373e-05, + "loss": 0.9301, + "step": 3798 + }, + { + "epoch": 0.64, + "learning_rate": 1.5947039645788834e-05, + "loss": 0.9664, + "step": 3799 + }, + { + "epoch": 0.64, + "learning_rate": 1.5942673634594853e-05, + "loss": 0.8955, + "step": 3800 + }, + { + "epoch": 0.64, + "learning_rate": 1.593830587143858e-05, + "loss": 0.95, + "step": 3801 + }, + { + "epoch": 0.64, + "learning_rate": 1.5933936357607663e-05, + "loss": 0.9392, + "step": 3802 + }, + { + "epoch": 0.64, + "learning_rate": 1.5929565094390294e-05, + "loss": 0.9241, + "step": 3803 + }, + { + "epoch": 0.64, + "learning_rate": 1.5925192083075154e-05, + "loss": 0.9562, + "step": 3804 + }, + { + "epoch": 0.64, + "learning_rate": 1.5920817324951466e-05, + "loss": 0.9263, + "step": 3805 + }, + { + "epoch": 0.64, + "learning_rate": 1.5916440821308948e-05, + "loss": 0.8999, + "step": 3806 + }, + { + "epoch": 0.64, + "learning_rate": 1.591206257343784e-05, + "loss": 0.8597, + "step": 3807 + }, + { + "epoch": 0.64, + "learning_rate": 1.5907682582628893e-05, + "loss": 0.8645, + "step": 3808 + }, + { + "epoch": 0.64, + "learning_rate": 1.5903300850173377e-05, + "loss": 0.9019, + "step": 3809 + }, + { + "epoch": 0.64, + "learning_rate": 1.589891737736307e-05, + "loss": 0.9312, + "step": 3810 + }, + { + "epoch": 0.64, + "learning_rate": 1.5894532165490267e-05, + "loss": 0.9272, + "step": 3811 + }, + { + "epoch": 0.64, + "learning_rate": 1.5890145215847775e-05, + "loss": 0.8988, + "step": 3812 + }, + { + "epoch": 0.64, + "learning_rate": 1.5885756529728912e-05, + "loss": 0.8833, + "step": 3813 + }, + { + "epoch": 0.64, + "learning_rate": 1.5881366108427506e-05, + "loss": 0.9565, + "step": 3814 + }, + { + "epoch": 0.64, + "learning_rate": 1.5876973953237902e-05, + "loss": 0.4044, + "step": 3815 + }, + { + "epoch": 0.64, + "learning_rate": 1.5872580065454952e-05, + "loss": 0.8769, + "step": 3816 + }, + { + "epoch": 
0.64, + "learning_rate": 1.586818444637402e-05, + "loss": 0.9475, + "step": 3817 + }, + { + "epoch": 0.64, + "learning_rate": 1.5863787097290987e-05, + "loss": 0.8899, + "step": 3818 + }, + { + "epoch": 0.64, + "learning_rate": 1.5859388019502225e-05, + "loss": 0.9556, + "step": 3819 + }, + { + "epoch": 0.64, + "learning_rate": 1.585498721430464e-05, + "loss": 0.9035, + "step": 3820 + }, + { + "epoch": 0.64, + "learning_rate": 1.5850584682995625e-05, + "loss": 0.9377, + "step": 3821 + }, + { + "epoch": 0.64, + "learning_rate": 1.58461804268731e-05, + "loss": 0.9087, + "step": 3822 + }, + { + "epoch": 0.64, + "learning_rate": 1.5841774447235483e-05, + "loss": 0.9132, + "step": 3823 + }, + { + "epoch": 0.64, + "learning_rate": 1.5837366745381698e-05, + "loss": 0.9807, + "step": 3824 + }, + { + "epoch": 0.64, + "learning_rate": 1.5832957322611192e-05, + "loss": 0.9937, + "step": 3825 + }, + { + "epoch": 0.64, + "learning_rate": 1.58285461802239e-05, + "loss": 0.9438, + "step": 3826 + }, + { + "epoch": 0.64, + "learning_rate": 1.582413331952028e-05, + "loss": 0.9835, + "step": 3827 + }, + { + "epoch": 0.64, + "learning_rate": 1.5819718741801283e-05, + "loss": 0.932, + "step": 3828 + }, + { + "epoch": 0.64, + "learning_rate": 1.5815302448368376e-05, + "loss": 0.9208, + "step": 3829 + }, + { + "epoch": 0.64, + "learning_rate": 1.5810884440523527e-05, + "loss": 0.9065, + "step": 3830 + }, + { + "epoch": 0.64, + "learning_rate": 1.5806464719569214e-05, + "loss": 0.8785, + "step": 3831 + }, + { + "epoch": 0.64, + "learning_rate": 1.5802043286808412e-05, + "loss": 0.9225, + "step": 3832 + }, + { + "epoch": 0.64, + "learning_rate": 1.579762014354461e-05, + "loss": 0.9491, + "step": 3833 + }, + { + "epoch": 0.64, + "learning_rate": 1.5793195291081794e-05, + "loss": 0.873, + "step": 3834 + }, + { + "epoch": 0.64, + "learning_rate": 1.5788768730724457e-05, + "loss": 0.967, + "step": 3835 + }, + { + "epoch": 0.64, + "learning_rate": 1.57843404637776e-05, + "loss": 0.9378, + "step": 3836 + }, + { + "epoch": 0.64, + "learning_rate": 1.5779910491546716e-05, + "loss": 0.9302, + "step": 3837 + }, + { + "epoch": 0.64, + "learning_rate": 1.577547881533781e-05, + "loss": 0.9696, + "step": 3838 + }, + { + "epoch": 0.64, + "learning_rate": 1.577104543645738e-05, + "loss": 0.9232, + "step": 3839 + }, + { + "epoch": 0.64, + "learning_rate": 1.5766610356212445e-05, + "loss": 0.9287, + "step": 3840 + }, + { + "epoch": 0.64, + "learning_rate": 1.5762173575910504e-05, + "loss": 0.9129, + "step": 3841 + }, + { + "epoch": 0.64, + "learning_rate": 1.5757735096859572e-05, + "loss": 0.955, + "step": 3842 + }, + { + "epoch": 0.64, + "learning_rate": 1.5753294920368153e-05, + "loss": 0.9303, + "step": 3843 + }, + { + "epoch": 0.64, + "learning_rate": 1.5748853047745264e-05, + "loss": 0.8783, + "step": 3844 + }, + { + "epoch": 0.64, + "learning_rate": 1.5744409480300403e-05, + "loss": 0.8838, + "step": 3845 + }, + { + "epoch": 0.64, + "learning_rate": 1.5739964219343596e-05, + "loss": 0.9127, + "step": 3846 + }, + { + "epoch": 0.64, + "learning_rate": 1.5735517266185344e-05, + "loss": 0.4319, + "step": 3847 + }, + { + "epoch": 0.65, + "learning_rate": 1.5731068622136655e-05, + "loss": 0.3869, + "step": 3848 + }, + { + "epoch": 0.65, + "learning_rate": 1.5726618288509036e-05, + "loss": 0.9062, + "step": 3849 + }, + { + "epoch": 0.65, + "learning_rate": 1.5722166266614496e-05, + "loss": 0.9272, + "step": 3850 + }, + { + "epoch": 0.65, + "learning_rate": 1.571771255776553e-05, + "loss": 0.8655, + "step": 3851 + }, + { + "epoch": 
0.65, + "learning_rate": 1.5713257163275148e-05, + "loss": 0.9078, + "step": 3852 + }, + { + "epoch": 0.65, + "learning_rate": 1.5708800084456836e-05, + "loss": 0.9016, + "step": 3853 + }, + { + "epoch": 0.65, + "learning_rate": 1.5704341322624597e-05, + "loss": 0.9025, + "step": 3854 + }, + { + "epoch": 0.65, + "learning_rate": 1.5699880879092914e-05, + "loss": 0.9159, + "step": 3855 + }, + { + "epoch": 0.65, + "learning_rate": 1.569541875517678e-05, + "loss": 0.9376, + "step": 3856 + }, + { + "epoch": 0.65, + "learning_rate": 1.5690954952191664e-05, + "loss": 0.9039, + "step": 3857 + }, + { + "epoch": 0.65, + "learning_rate": 1.568648947145355e-05, + "loss": 0.9807, + "step": 3858 + }, + { + "epoch": 0.65, + "learning_rate": 1.5682022314278907e-05, + "loss": 0.9455, + "step": 3859 + }, + { + "epoch": 0.65, + "learning_rate": 1.5677553481984704e-05, + "loss": 0.9056, + "step": 3860 + }, + { + "epoch": 0.65, + "learning_rate": 1.5673082975888388e-05, + "loss": 1.0085, + "step": 3861 + }, + { + "epoch": 0.65, + "learning_rate": 1.566861079730792e-05, + "loss": 0.9697, + "step": 3862 + }, + { + "epoch": 0.65, + "learning_rate": 1.5664136947561745e-05, + "loss": 0.8644, + "step": 3863 + }, + { + "epoch": 0.65, + "learning_rate": 1.5659661427968792e-05, + "loss": 0.9083, + "step": 3864 + }, + { + "epoch": 0.65, + "learning_rate": 1.56551842398485e-05, + "loss": 0.9208, + "step": 3865 + }, + { + "epoch": 0.65, + "learning_rate": 1.5650705384520794e-05, + "loss": 0.9333, + "step": 3866 + }, + { + "epoch": 0.65, + "learning_rate": 1.5646224863306075e-05, + "loss": 0.9818, + "step": 3867 + }, + { + "epoch": 0.65, + "learning_rate": 1.5641742677525257e-05, + "loss": 0.9367, + "step": 3868 + }, + { + "epoch": 0.65, + "learning_rate": 1.563725882849973e-05, + "loss": 0.9479, + "step": 3869 + }, + { + "epoch": 0.65, + "learning_rate": 1.5632773317551385e-05, + "loss": 0.9391, + "step": 3870 + }, + { + "epoch": 0.65, + "learning_rate": 1.562828614600259e-05, + "loss": 0.9422, + "step": 3871 + }, + { + "epoch": 0.65, + "learning_rate": 1.562379731517622e-05, + "loss": 0.8898, + "step": 3872 + }, + { + "epoch": 0.65, + "learning_rate": 1.5619306826395617e-05, + "loss": 0.8433, + "step": 3873 + }, + { + "epoch": 0.65, + "learning_rate": 1.5614814680984637e-05, + "loss": 0.9803, + "step": 3874 + }, + { + "epoch": 0.65, + "learning_rate": 1.5610320880267603e-05, + "loss": 0.9315, + "step": 3875 + }, + { + "epoch": 0.65, + "learning_rate": 1.5605825425569333e-05, + "loss": 0.9167, + "step": 3876 + }, + { + "epoch": 0.65, + "learning_rate": 1.560132831821514e-05, + "loss": 0.896, + "step": 3877 + }, + { + "epoch": 0.65, + "learning_rate": 1.5596829559530814e-05, + "loss": 0.8898, + "step": 3878 + }, + { + "epoch": 0.65, + "learning_rate": 1.5592329150842642e-05, + "loss": 0.9268, + "step": 3879 + }, + { + "epoch": 0.65, + "learning_rate": 1.5587827093477384e-05, + "loss": 0.9011, + "step": 3880 + }, + { + "epoch": 0.65, + "learning_rate": 1.55833233887623e-05, + "loss": 0.3841, + "step": 3881 + }, + { + "epoch": 0.65, + "learning_rate": 1.557881803802512e-05, + "loss": 0.851, + "step": 3882 + }, + { + "epoch": 0.65, + "learning_rate": 1.557431104259408e-05, + "loss": 0.9543, + "step": 3883 + }, + { + "epoch": 0.65, + "learning_rate": 1.5569802403797882e-05, + "loss": 0.9674, + "step": 3884 + }, + { + "epoch": 0.65, + "learning_rate": 1.5565292122965724e-05, + "loss": 0.8878, + "step": 3885 + }, + { + "epoch": 0.65, + "learning_rate": 1.5560780201427278e-05, + "loss": 0.9161, + "step": 3886 + }, + { + "epoch": 
0.65, + "learning_rate": 1.555626664051271e-05, + "loss": 0.9754, + "step": 3887 + }, + { + "epoch": 0.65, + "learning_rate": 1.5551751441552666e-05, + "loss": 0.872, + "step": 3888 + }, + { + "epoch": 0.65, + "learning_rate": 1.554723460587827e-05, + "loss": 0.8829, + "step": 3889 + }, + { + "epoch": 0.65, + "learning_rate": 1.554271613482114e-05, + "loss": 0.9849, + "step": 3890 + }, + { + "epoch": 0.65, + "learning_rate": 1.553819602971336e-05, + "loss": 0.9475, + "step": 3891 + }, + { + "epoch": 0.65, + "learning_rate": 1.5533674291887505e-05, + "loss": 0.9326, + "step": 3892 + }, + { + "epoch": 0.65, + "learning_rate": 1.5529150922676635e-05, + "loss": 0.874, + "step": 3893 + }, + { + "epoch": 0.65, + "learning_rate": 1.5524625923414284e-05, + "loss": 0.9763, + "step": 3894 + }, + { + "epoch": 0.65, + "learning_rate": 1.5520099295434468e-05, + "loss": 0.9333, + "step": 3895 + }, + { + "epoch": 0.65, + "learning_rate": 1.5515571040071687e-05, + "loss": 0.8543, + "step": 3896 + }, + { + "epoch": 0.65, + "learning_rate": 1.5511041158660916e-05, + "loss": 0.8863, + "step": 3897 + }, + { + "epoch": 0.65, + "learning_rate": 1.550650965253761e-05, + "loss": 0.9308, + "step": 3898 + }, + { + "epoch": 0.65, + "learning_rate": 1.5501976523037708e-05, + "loss": 0.9423, + "step": 3899 + }, + { + "epoch": 0.65, + "learning_rate": 1.5497441771497623e-05, + "loss": 0.9846, + "step": 3900 + }, + { + "epoch": 0.65, + "learning_rate": 1.5492905399254247e-05, + "loss": 0.9956, + "step": 3901 + }, + { + "epoch": 0.65, + "learning_rate": 1.548836740764495e-05, + "loss": 0.9688, + "step": 3902 + }, + { + "epoch": 0.65, + "learning_rate": 1.5483827798007574e-05, + "loss": 0.9678, + "step": 3903 + }, + { + "epoch": 0.65, + "learning_rate": 1.5479286571680454e-05, + "loss": 0.9352, + "step": 3904 + }, + { + "epoch": 0.65, + "learning_rate": 1.5474743730002383e-05, + "loss": 0.93, + "step": 3905 + }, + { + "epoch": 0.65, + "learning_rate": 1.547019927431264e-05, + "loss": 0.9027, + "step": 3906 + }, + { + "epoch": 0.65, + "learning_rate": 1.546565320595098e-05, + "loss": 0.9288, + "step": 3907 + }, + { + "epoch": 0.66, + "learning_rate": 1.5461105526257627e-05, + "loss": 0.9338, + "step": 3908 + }, + { + "epoch": 0.66, + "learning_rate": 1.545655623657329e-05, + "loss": 0.9699, + "step": 3909 + }, + { + "epoch": 0.66, + "learning_rate": 1.5452005338239142e-05, + "loss": 0.4017, + "step": 3910 + }, + { + "epoch": 0.66, + "learning_rate": 1.5447452832596842e-05, + "loss": 0.9015, + "step": 3911 + }, + { + "epoch": 0.66, + "learning_rate": 1.5442898720988507e-05, + "loss": 0.9549, + "step": 3912 + }, + { + "epoch": 0.66, + "learning_rate": 1.5438343004756746e-05, + "loss": 0.9366, + "step": 3913 + }, + { + "epoch": 0.66, + "learning_rate": 1.5433785685244623e-05, + "loss": 0.8673, + "step": 3914 + }, + { + "epoch": 0.66, + "learning_rate": 1.5429226763795693e-05, + "loss": 0.8937, + "step": 3915 + }, + { + "epoch": 0.66, + "learning_rate": 1.5424666241753966e-05, + "loss": 0.9442, + "step": 3916 + }, + { + "epoch": 0.66, + "learning_rate": 1.5420104120463934e-05, + "loss": 0.9098, + "step": 3917 + }, + { + "epoch": 0.66, + "learning_rate": 1.5415540401270557e-05, + "loss": 0.8892, + "step": 3918 + }, + { + "epoch": 0.66, + "learning_rate": 1.5410975085519272e-05, + "loss": 0.9801, + "step": 3919 + }, + { + "epoch": 0.66, + "learning_rate": 1.5406408174555978e-05, + "loss": 0.9038, + "step": 3920 + }, + { + "epoch": 0.66, + "learning_rate": 1.5401839669727046e-05, + "loss": 0.415, + "step": 3921 + }, + { + 
"epoch": 0.66, + "learning_rate": 1.5397269572379323e-05, + "loss": 0.3546, + "step": 3922 + }, + { + "epoch": 0.66, + "learning_rate": 1.5392697883860118e-05, + "loss": 0.8981, + "step": 3923 + }, + { + "epoch": 0.66, + "learning_rate": 1.5388124605517218e-05, + "loss": 0.9329, + "step": 3924 + }, + { + "epoch": 0.66, + "learning_rate": 1.5383549738698867e-05, + "loss": 0.9014, + "step": 3925 + }, + { + "epoch": 0.66, + "learning_rate": 1.5378973284753784e-05, + "loss": 0.9062, + "step": 3926 + }, + { + "epoch": 0.66, + "learning_rate": 1.537439524503116e-05, + "loss": 0.8784, + "step": 3927 + }, + { + "epoch": 0.66, + "learning_rate": 1.5369815620880647e-05, + "loss": 0.9033, + "step": 3928 + }, + { + "epoch": 0.66, + "learning_rate": 1.5365234413652365e-05, + "loss": 0.962, + "step": 3929 + }, + { + "epoch": 0.66, + "learning_rate": 1.53606516246969e-05, + "loss": 0.9354, + "step": 3930 + }, + { + "epoch": 0.66, + "learning_rate": 1.5356067255365312e-05, + "loss": 0.9507, + "step": 3931 + }, + { + "epoch": 0.66, + "learning_rate": 1.5351481307009116e-05, + "loss": 0.8941, + "step": 3932 + }, + { + "epoch": 0.66, + "learning_rate": 1.53468937809803e-05, + "loss": 0.9441, + "step": 3933 + }, + { + "epoch": 0.66, + "learning_rate": 1.5342304678631313e-05, + "loss": 0.8877, + "step": 3934 + }, + { + "epoch": 0.66, + "learning_rate": 1.5337714001315068e-05, + "loss": 0.9495, + "step": 3935 + }, + { + "epoch": 0.66, + "learning_rate": 1.5333121750384955e-05, + "loss": 0.8849, + "step": 3936 + }, + { + "epoch": 0.66, + "learning_rate": 1.53285279271948e-05, + "loss": 1.0272, + "step": 3937 + }, + { + "epoch": 0.66, + "learning_rate": 1.5323932533098927e-05, + "loss": 0.826, + "step": 3938 + }, + { + "epoch": 0.66, + "learning_rate": 1.5319335569452102e-05, + "loss": 0.8835, + "step": 3939 + }, + { + "epoch": 0.66, + "learning_rate": 1.531473703760955e-05, + "loss": 0.8808, + "step": 3940 + }, + { + "epoch": 0.66, + "learning_rate": 1.531013693892697e-05, + "loss": 0.9019, + "step": 3941 + }, + { + "epoch": 0.66, + "learning_rate": 1.5305535274760524e-05, + "loss": 0.9218, + "step": 3942 + }, + { + "epoch": 0.66, + "learning_rate": 1.5300932046466827e-05, + "loss": 0.8994, + "step": 3943 + }, + { + "epoch": 0.66, + "learning_rate": 1.529632725540296e-05, + "loss": 0.8747, + "step": 3944 + }, + { + "epoch": 0.66, + "learning_rate": 1.5291720902926462e-05, + "loss": 0.8893, + "step": 3945 + }, + { + "epoch": 0.66, + "learning_rate": 1.5287112990395338e-05, + "loss": 0.914, + "step": 3946 + }, + { + "epoch": 0.66, + "learning_rate": 1.5282503519168038e-05, + "loss": 0.9258, + "step": 3947 + }, + { + "epoch": 0.66, + "learning_rate": 1.5277892490603495e-05, + "loss": 0.9203, + "step": 3948 + }, + { + "epoch": 0.66, + "learning_rate": 1.5273279906061082e-05, + "loss": 0.919, + "step": 3949 + }, + { + "epoch": 0.66, + "learning_rate": 1.5268665766900638e-05, + "loss": 0.9257, + "step": 3950 + }, + { + "epoch": 0.66, + "learning_rate": 1.526405007448246e-05, + "loss": 0.8963, + "step": 3951 + }, + { + "epoch": 0.66, + "learning_rate": 1.52594328301673e-05, + "loss": 0.9333, + "step": 3952 + }, + { + "epoch": 0.66, + "learning_rate": 1.5254814035316376e-05, + "loss": 0.8829, + "step": 3953 + }, + { + "epoch": 0.66, + "learning_rate": 1.5250193691291353e-05, + "loss": 0.9044, + "step": 3954 + }, + { + "epoch": 0.66, + "learning_rate": 1.5245571799454354e-05, + "loss": 0.3695, + "step": 3955 + }, + { + "epoch": 0.66, + "learning_rate": 1.5240948361167964e-05, + "loss": 0.9689, + "step": 3956 + }, + { + 
"epoch": 0.66, + "learning_rate": 1.5236323377795224e-05, + "loss": 0.3789, + "step": 3957 + }, + { + "epoch": 0.66, + "learning_rate": 1.523169685069962e-05, + "loss": 0.948, + "step": 3958 + }, + { + "epoch": 0.66, + "learning_rate": 1.5227068781245104e-05, + "loss": 0.9303, + "step": 3959 + }, + { + "epoch": 0.66, + "learning_rate": 1.5222439170796081e-05, + "loss": 0.9687, + "step": 3960 + }, + { + "epoch": 0.66, + "learning_rate": 1.521780802071741e-05, + "loss": 0.9563, + "step": 3961 + }, + { + "epoch": 0.66, + "learning_rate": 1.5213175332374399e-05, + "loss": 0.8961, + "step": 3962 + }, + { + "epoch": 0.66, + "learning_rate": 1.520854110713281e-05, + "loss": 0.3975, + "step": 3963 + }, + { + "epoch": 0.66, + "learning_rate": 1.5203905346358871e-05, + "loss": 0.8898, + "step": 3964 + }, + { + "epoch": 0.66, + "learning_rate": 1.5199268051419243e-05, + "loss": 0.8822, + "step": 3965 + }, + { + "epoch": 0.66, + "learning_rate": 1.519462922368105e-05, + "loss": 0.9181, + "step": 3966 + }, + { + "epoch": 0.67, + "learning_rate": 1.5189988864511871e-05, + "loss": 0.9617, + "step": 3967 + }, + { + "epoch": 0.67, + "learning_rate": 1.5185346975279735e-05, + "loss": 0.9209, + "step": 3968 + }, + { + "epoch": 0.67, + "learning_rate": 1.5180703557353112e-05, + "loss": 0.8899, + "step": 3969 + }, + { + "epoch": 0.67, + "learning_rate": 1.5176058612100935e-05, + "loss": 0.907, + "step": 3970 + }, + { + "epoch": 0.67, + "learning_rate": 1.5171412140892577e-05, + "loss": 0.9216, + "step": 3971 + }, + { + "epoch": 0.67, + "learning_rate": 1.5166764145097874e-05, + "loss": 0.907, + "step": 3972 + }, + { + "epoch": 0.67, + "learning_rate": 1.5162114626087098e-05, + "loss": 0.9048, + "step": 3973 + }, + { + "epoch": 0.67, + "learning_rate": 1.515746358523098e-05, + "loss": 0.9028, + "step": 3974 + }, + { + "epoch": 0.67, + "learning_rate": 1.5152811023900688e-05, + "loss": 0.8869, + "step": 3975 + }, + { + "epoch": 0.67, + "learning_rate": 1.5148156943467855e-05, + "loss": 0.9148, + "step": 3976 + }, + { + "epoch": 0.67, + "learning_rate": 1.5143501345304547e-05, + "loss": 0.9322, + "step": 3977 + }, + { + "epoch": 0.67, + "learning_rate": 1.513884423078329e-05, + "loss": 0.9195, + "step": 3978 + }, + { + "epoch": 0.67, + "learning_rate": 1.5134185601277036e-05, + "loss": 0.9023, + "step": 3979 + }, + { + "epoch": 0.67, + "learning_rate": 1.5129525458159211e-05, + "loss": 0.9382, + "step": 3980 + }, + { + "epoch": 0.67, + "learning_rate": 1.5124863802803667e-05, + "loss": 0.8944, + "step": 3981 + }, + { + "epoch": 0.67, + "learning_rate": 1.5120200636584713e-05, + "loss": 0.9059, + "step": 3982 + }, + { + "epoch": 0.67, + "learning_rate": 1.5115535960877095e-05, + "loss": 0.8238, + "step": 3983 + }, + { + "epoch": 0.67, + "learning_rate": 1.5110869777056008e-05, + "loss": 0.9743, + "step": 3984 + }, + { + "epoch": 0.67, + "learning_rate": 1.5106202086497095e-05, + "loss": 0.9037, + "step": 3985 + }, + { + "epoch": 0.67, + "learning_rate": 1.5101532890576443e-05, + "loss": 0.97, + "step": 3986 + }, + { + "epoch": 0.67, + "learning_rate": 1.5096862190670571e-05, + "loss": 0.3614, + "step": 3987 + }, + { + "epoch": 0.67, + "learning_rate": 1.5092189988156455e-05, + "loss": 0.9855, + "step": 3988 + }, + { + "epoch": 0.67, + "learning_rate": 1.508751628441151e-05, + "loss": 0.8888, + "step": 3989 + }, + { + "epoch": 0.67, + "learning_rate": 1.5082841080813594e-05, + "loss": 0.9044, + "step": 3990 + }, + { + "epoch": 0.67, + "learning_rate": 1.5078164378740998e-05, + "loss": 0.9073, + "step": 3991 + }, + 
{ + "epoch": 0.67, + "learning_rate": 1.507348617957247e-05, + "loss": 0.9359, + "step": 3992 + }, + { + "epoch": 0.67, + "learning_rate": 1.5068806484687191e-05, + "loss": 0.8666, + "step": 3993 + }, + { + "epoch": 0.67, + "learning_rate": 1.5064125295464783e-05, + "loss": 0.9289, + "step": 3994 + }, + { + "epoch": 0.67, + "learning_rate": 1.5059442613285308e-05, + "loss": 0.8974, + "step": 3995 + }, + { + "epoch": 0.67, + "learning_rate": 1.5054758439529271e-05, + "loss": 0.9282, + "step": 3996 + }, + { + "epoch": 0.67, + "learning_rate": 1.5050072775577617e-05, + "loss": 0.949, + "step": 3997 + }, + { + "epoch": 0.67, + "learning_rate": 1.5045385622811726e-05, + "loss": 0.883, + "step": 3998 + }, + { + "epoch": 0.67, + "learning_rate": 1.504069698261342e-05, + "loss": 0.8827, + "step": 3999 + }, + { + "epoch": 0.67, + "learning_rate": 1.5036006856364961e-05, + "loss": 0.9018, + "step": 4000 + }, + { + "epoch": 0.67, + "learning_rate": 1.5031315245449053e-05, + "loss": 0.9502, + "step": 4001 + }, + { + "epoch": 0.67, + "learning_rate": 1.5026622151248815e-05, + "loss": 0.9444, + "step": 4002 + }, + { + "epoch": 0.67, + "learning_rate": 1.5021927575147834e-05, + "loss": 0.8398, + "step": 4003 + }, + { + "epoch": 0.67, + "learning_rate": 1.5017231518530118e-05, + "loss": 0.8799, + "step": 4004 + }, + { + "epoch": 0.67, + "learning_rate": 1.501253398278011e-05, + "loss": 0.9303, + "step": 4005 + }, + { + "epoch": 0.67, + "learning_rate": 1.5007834969282695e-05, + "loss": 0.8995, + "step": 4006 + }, + { + "epoch": 0.67, + "learning_rate": 1.5003134479423191e-05, + "loss": 0.9984, + "step": 4007 + }, + { + "epoch": 0.67, + "learning_rate": 1.4998432514587355e-05, + "loss": 0.9389, + "step": 4008 + }, + { + "epoch": 0.67, + "learning_rate": 1.4993729076161367e-05, + "loss": 0.8454, + "step": 4009 + }, + { + "epoch": 0.67, + "learning_rate": 1.498902416553186e-05, + "loss": 0.8636, + "step": 4010 + }, + { + "epoch": 0.67, + "learning_rate": 1.498431778408588e-05, + "loss": 0.9852, + "step": 4011 + }, + { + "epoch": 0.67, + "learning_rate": 1.4979609933210926e-05, + "loss": 0.9201, + "step": 4012 + }, + { + "epoch": 0.67, + "learning_rate": 1.4974900614294917e-05, + "loss": 0.9172, + "step": 4013 + }, + { + "epoch": 0.67, + "learning_rate": 1.4970189828726213e-05, + "loss": 0.8633, + "step": 4014 + }, + { + "epoch": 0.67, + "learning_rate": 1.49654775778936e-05, + "loss": 0.9808, + "step": 4015 + }, + { + "epoch": 0.67, + "learning_rate": 1.4960763863186295e-05, + "loss": 0.9023, + "step": 4016 + }, + { + "epoch": 0.67, + "learning_rate": 1.4956048685993963e-05, + "loss": 0.8409, + "step": 4017 + }, + { + "epoch": 0.67, + "learning_rate": 1.4951332047706676e-05, + "loss": 0.9083, + "step": 4018 + }, + { + "epoch": 0.67, + "learning_rate": 1.4946613949714949e-05, + "loss": 0.9023, + "step": 4019 + }, + { + "epoch": 0.67, + "learning_rate": 1.4941894393409733e-05, + "loss": 0.8659, + "step": 4020 + }, + { + "epoch": 0.67, + "learning_rate": 1.4937173380182398e-05, + "loss": 0.9255, + "step": 4021 + }, + { + "epoch": 0.67, + "learning_rate": 1.4932450911424749e-05, + "loss": 0.9332, + "step": 4022 + }, + { + "epoch": 0.67, + "learning_rate": 1.4927726988529019e-05, + "loss": 0.8931, + "step": 4023 + }, + { + "epoch": 0.67, + "learning_rate": 1.492300161288787e-05, + "loss": 0.891, + "step": 4024 + }, + { + "epoch": 0.67, + "learning_rate": 1.4918274785894392e-05, + "loss": 0.9089, + "step": 4025 + }, + { + "epoch": 0.67, + "learning_rate": 1.4913546508942106e-05, + "loss": 0.9038, + "step": 4026 + 
}, + { + "epoch": 0.68, + "learning_rate": 1.4908816783424948e-05, + "loss": 0.8677, + "step": 4027 + }, + { + "epoch": 0.68, + "learning_rate": 1.4904085610737302e-05, + "loss": 0.8785, + "step": 4028 + }, + { + "epoch": 0.68, + "learning_rate": 1.4899352992273958e-05, + "loss": 0.918, + "step": 4029 + }, + { + "epoch": 0.68, + "learning_rate": 1.4894618929430151e-05, + "loss": 0.9599, + "step": 4030 + }, + { + "epoch": 0.68, + "learning_rate": 1.4889883423601526e-05, + "loss": 0.9703, + "step": 4031 + }, + { + "epoch": 0.68, + "learning_rate": 1.4885146476184157e-05, + "loss": 0.9541, + "step": 4032 + }, + { + "epoch": 0.68, + "learning_rate": 1.4880408088574551e-05, + "loss": 0.9069, + "step": 4033 + }, + { + "epoch": 0.68, + "learning_rate": 1.4875668262169633e-05, + "loss": 0.8635, + "step": 4034 + }, + { + "epoch": 0.68, + "learning_rate": 1.4870926998366755e-05, + "loss": 0.9574, + "step": 4035 + }, + { + "epoch": 0.68, + "learning_rate": 1.4866184298563682e-05, + "loss": 0.9268, + "step": 4036 + }, + { + "epoch": 0.68, + "learning_rate": 1.4861440164158623e-05, + "loss": 0.9348, + "step": 4037 + }, + { + "epoch": 0.68, + "learning_rate": 1.4856694596550192e-05, + "loss": 0.9073, + "step": 4038 + }, + { + "epoch": 0.68, + "learning_rate": 1.4851947597137434e-05, + "loss": 0.9216, + "step": 4039 + }, + { + "epoch": 0.68, + "learning_rate": 1.4847199167319812e-05, + "loss": 0.8919, + "step": 4040 + }, + { + "epoch": 0.68, + "learning_rate": 1.4842449308497217e-05, + "loss": 0.9217, + "step": 4041 + }, + { + "epoch": 0.68, + "learning_rate": 1.4837698022069953e-05, + "loss": 0.9265, + "step": 4042 + }, + { + "epoch": 0.68, + "learning_rate": 1.483294530943875e-05, + "loss": 0.915, + "step": 4043 + }, + { + "epoch": 0.68, + "learning_rate": 1.4828191172004755e-05, + "loss": 0.8847, + "step": 4044 + }, + { + "epoch": 0.68, + "learning_rate": 1.4823435611169545e-05, + "loss": 0.936, + "step": 4045 + }, + { + "epoch": 0.68, + "learning_rate": 1.48186786283351e-05, + "loss": 0.9215, + "step": 4046 + }, + { + "epoch": 0.68, + "learning_rate": 1.4813920224903838e-05, + "loss": 0.9826, + "step": 4047 + }, + { + "epoch": 0.68, + "learning_rate": 1.4809160402278574e-05, + "loss": 0.8681, + "step": 4048 + }, + { + "epoch": 0.68, + "learning_rate": 1.4804399161862564e-05, + "loss": 0.9334, + "step": 4049 + }, + { + "epoch": 0.68, + "learning_rate": 1.4799636505059463e-05, + "loss": 0.8765, + "step": 4050 + }, + { + "epoch": 0.68, + "learning_rate": 1.479487243327336e-05, + "loss": 0.9539, + "step": 4051 + }, + { + "epoch": 0.68, + "learning_rate": 1.479010694790875e-05, + "loss": 0.956, + "step": 4052 + }, + { + "epoch": 0.68, + "learning_rate": 1.4785340050370542e-05, + "loss": 0.9364, + "step": 4053 + }, + { + "epoch": 0.68, + "learning_rate": 1.4780571742064079e-05, + "loss": 0.9191, + "step": 4054 + }, + { + "epoch": 0.68, + "learning_rate": 1.4775802024395098e-05, + "loss": 0.8921, + "step": 4055 + }, + { + "epoch": 0.68, + "learning_rate": 1.4771030898769766e-05, + "loss": 0.9164, + "step": 4056 + }, + { + "epoch": 0.68, + "learning_rate": 1.4766258366594656e-05, + "loss": 0.8761, + "step": 4057 + }, + { + "epoch": 0.68, + "learning_rate": 1.476148442927677e-05, + "loss": 0.9354, + "step": 4058 + }, + { + "epoch": 0.68, + "learning_rate": 1.4756709088223508e-05, + "loss": 0.8773, + "step": 4059 + }, + { + "epoch": 0.68, + "learning_rate": 1.4751932344842689e-05, + "loss": 0.9596, + "step": 4060 + }, + { + "epoch": 0.68, + "learning_rate": 1.474715420054255e-05, + "loss": 0.8895, + "step": 
4061 + }, + { + "epoch": 0.68, + "learning_rate": 1.4742374656731739e-05, + "loss": 0.9299, + "step": 4062 + }, + { + "epoch": 0.68, + "learning_rate": 1.4737593714819314e-05, + "loss": 0.8541, + "step": 4063 + }, + { + "epoch": 0.68, + "learning_rate": 1.4732811376214744e-05, + "loss": 0.8445, + "step": 4064 + }, + { + "epoch": 0.68, + "learning_rate": 1.4728027642327914e-05, + "loss": 1.0096, + "step": 4065 + }, + { + "epoch": 0.68, + "learning_rate": 1.4723242514569122e-05, + "loss": 0.9714, + "step": 4066 + }, + { + "epoch": 0.68, + "learning_rate": 1.4718455994349068e-05, + "loss": 0.9607, + "step": 4067 + }, + { + "epoch": 0.68, + "learning_rate": 1.4713668083078874e-05, + "loss": 0.9482, + "step": 4068 + }, + { + "epoch": 0.68, + "learning_rate": 1.4708878782170062e-05, + "loss": 0.9209, + "step": 4069 + }, + { + "epoch": 0.68, + "learning_rate": 1.4704088093034573e-05, + "loss": 0.901, + "step": 4070 + }, + { + "epoch": 0.68, + "learning_rate": 1.4699296017084743e-05, + "loss": 0.8952, + "step": 4071 + }, + { + "epoch": 0.68, + "learning_rate": 1.4694502555733336e-05, + "loss": 0.3475, + "step": 4072 + }, + { + "epoch": 0.68, + "learning_rate": 1.4689707710393509e-05, + "loss": 0.8827, + "step": 4073 + }, + { + "epoch": 0.68, + "learning_rate": 1.4684911482478833e-05, + "loss": 0.8647, + "step": 4074 + }, + { + "epoch": 0.68, + "learning_rate": 1.4680113873403288e-05, + "loss": 0.8689, + "step": 4075 + }, + { + "epoch": 0.68, + "learning_rate": 1.467531488458126e-05, + "loss": 0.8905, + "step": 4076 + }, + { + "epoch": 0.68, + "learning_rate": 1.4670514517427536e-05, + "loss": 0.9027, + "step": 4077 + }, + { + "epoch": 0.68, + "learning_rate": 1.4665712773357319e-05, + "loss": 0.9562, + "step": 4078 + }, + { + "epoch": 0.68, + "learning_rate": 1.4660909653786212e-05, + "loss": 0.8868, + "step": 4079 + }, + { + "epoch": 0.68, + "learning_rate": 1.4656105160130226e-05, + "loss": 0.9228, + "step": 4080 + }, + { + "epoch": 0.68, + "learning_rate": 1.4651299293805774e-05, + "loss": 0.8978, + "step": 4081 + }, + { + "epoch": 0.68, + "learning_rate": 1.4646492056229675e-05, + "loss": 0.9344, + "step": 4082 + }, + { + "epoch": 0.68, + "learning_rate": 1.4641683448819158e-05, + "loss": 0.3776, + "step": 4083 + }, + { + "epoch": 0.68, + "learning_rate": 1.4636873472991844e-05, + "loss": 0.893, + "step": 4084 + }, + { + "epoch": 0.68, + "learning_rate": 1.4632062130165767e-05, + "loss": 0.9044, + "step": 4085 + }, + { + "epoch": 0.68, + "learning_rate": 1.4627249421759362e-05, + "loss": 0.4264, + "step": 4086 + }, + { + "epoch": 0.69, + "learning_rate": 1.4622435349191469e-05, + "loss": 0.9765, + "step": 4087 + }, + { + "epoch": 0.69, + "learning_rate": 1.4617619913881318e-05, + "loss": 0.3816, + "step": 4088 + }, + { + "epoch": 0.69, + "learning_rate": 1.4612803117248557e-05, + "loss": 0.3401, + "step": 4089 + }, + { + "epoch": 0.69, + "learning_rate": 1.4607984960713225e-05, + "loss": 0.9598, + "step": 4090 + }, + { + "epoch": 0.69, + "learning_rate": 1.4603165445695766e-05, + "loss": 0.8826, + "step": 4091 + }, + { + "epoch": 0.69, + "learning_rate": 1.4598344573617023e-05, + "loss": 0.9394, + "step": 4092 + }, + { + "epoch": 0.69, + "learning_rate": 1.4593522345898237e-05, + "loss": 0.9289, + "step": 4093 + }, + { + "epoch": 0.69, + "learning_rate": 1.4588698763961056e-05, + "loss": 0.927, + "step": 4094 + }, + { + "epoch": 0.69, + "learning_rate": 1.4583873829227522e-05, + "loss": 0.9028, + "step": 4095 + }, + { + "epoch": 0.69, + "learning_rate": 1.4579047543120071e-05, + "loss": 
0.3923, + "step": 4096 + }, + { + "epoch": 0.69, + "learning_rate": 1.4574219907061546e-05, + "loss": 0.8885, + "step": 4097 + }, + { + "epoch": 0.69, + "learning_rate": 1.4569390922475186e-05, + "loss": 0.9294, + "step": 4098 + }, + { + "epoch": 0.69, + "learning_rate": 1.4564560590784622e-05, + "loss": 0.9456, + "step": 4099 + }, + { + "epoch": 0.69, + "learning_rate": 1.4559728913413892e-05, + "loss": 0.9102, + "step": 4100 + }, + { + "epoch": 0.69, + "learning_rate": 1.4554895891787417e-05, + "loss": 0.9184, + "step": 4101 + }, + { + "epoch": 0.69, + "learning_rate": 1.4550061527330033e-05, + "loss": 0.9311, + "step": 4102 + }, + { + "epoch": 0.69, + "learning_rate": 1.4545225821466951e-05, + "loss": 0.906, + "step": 4103 + }, + { + "epoch": 0.69, + "learning_rate": 1.4540388775623793e-05, + "loss": 0.8513, + "step": 4104 + }, + { + "epoch": 0.69, + "learning_rate": 1.4535550391226566e-05, + "loss": 0.9321, + "step": 4105 + }, + { + "epoch": 0.69, + "learning_rate": 1.4530710669701684e-05, + "loss": 0.8969, + "step": 4106 + }, + { + "epoch": 0.69, + "learning_rate": 1.4525869612475942e-05, + "loss": 0.8271, + "step": 4107 + }, + { + "epoch": 0.69, + "learning_rate": 1.4521027220976538e-05, + "loss": 0.9202, + "step": 4108 + }, + { + "epoch": 0.69, + "learning_rate": 1.4516183496631052e-05, + "loss": 0.8335, + "step": 4109 + }, + { + "epoch": 0.69, + "learning_rate": 1.4511338440867473e-05, + "loss": 0.9225, + "step": 4110 + }, + { + "epoch": 0.69, + "learning_rate": 1.450649205511417e-05, + "loss": 0.917, + "step": 4111 + }, + { + "epoch": 0.69, + "learning_rate": 1.450164434079991e-05, + "loss": 0.9074, + "step": 4112 + }, + { + "epoch": 0.69, + "learning_rate": 1.4496795299353845e-05, + "loss": 0.9366, + "step": 4113 + }, + { + "epoch": 0.69, + "learning_rate": 1.4491944932205533e-05, + "loss": 0.9437, + "step": 4114 + }, + { + "epoch": 0.69, + "learning_rate": 1.4487093240784902e-05, + "loss": 0.9086, + "step": 4115 + }, + { + "epoch": 0.69, + "learning_rate": 1.4482240226522288e-05, + "loss": 0.9083, + "step": 4116 + }, + { + "epoch": 0.69, + "learning_rate": 1.4477385890848408e-05, + "loss": 0.9686, + "step": 4117 + }, + { + "epoch": 0.69, + "learning_rate": 1.447253023519437e-05, + "loss": 0.9068, + "step": 4118 + }, + { + "epoch": 0.69, + "learning_rate": 1.4467673260991674e-05, + "loss": 0.9098, + "step": 4119 + }, + { + "epoch": 0.69, + "learning_rate": 1.446281496967221e-05, + "loss": 0.8584, + "step": 4120 + }, + { + "epoch": 0.69, + "learning_rate": 1.4457955362668245e-05, + "loss": 0.4081, + "step": 4121 + }, + { + "epoch": 0.69, + "learning_rate": 1.4453094441412448e-05, + "loss": 0.9029, + "step": 4122 + }, + { + "epoch": 0.69, + "learning_rate": 1.4448232207337868e-05, + "loss": 0.384, + "step": 4123 + }, + { + "epoch": 0.69, + "learning_rate": 1.4443368661877939e-05, + "loss": 0.8486, + "step": 4124 + }, + { + "epoch": 0.69, + "learning_rate": 1.443850380646649e-05, + "loss": 0.9335, + "step": 4125 + }, + { + "epoch": 0.69, + "learning_rate": 1.4433637642537728e-05, + "loss": 0.9457, + "step": 4126 + }, + { + "epoch": 0.69, + "learning_rate": 1.4428770171526253e-05, + "loss": 0.848, + "step": 4127 + }, + { + "epoch": 0.69, + "learning_rate": 1.4423901394867041e-05, + "loss": 0.8965, + "step": 4128 + }, + { + "epoch": 0.69, + "learning_rate": 1.4419031313995463e-05, + "loss": 0.8982, + "step": 4129 + }, + { + "epoch": 0.69, + "learning_rate": 1.4414159930347265e-05, + "loss": 0.3983, + "step": 4130 + }, + { + "epoch": 0.69, + "learning_rate": 1.4409287245358586e-05, + 
"loss": 0.8876, + "step": 4131 + }, + { + "epoch": 0.69, + "learning_rate": 1.4404413260465941e-05, + "loss": 1.0081, + "step": 4132 + }, + { + "epoch": 0.69, + "learning_rate": 1.4399537977106235e-05, + "loss": 0.9634, + "step": 4133 + }, + { + "epoch": 0.69, + "learning_rate": 1.4394661396716749e-05, + "loss": 0.9285, + "step": 4134 + }, + { + "epoch": 0.69, + "learning_rate": 1.4389783520735154e-05, + "loss": 0.9396, + "step": 4135 + }, + { + "epoch": 0.69, + "learning_rate": 1.4384904350599498e-05, + "loss": 0.8968, + "step": 4136 + }, + { + "epoch": 0.69, + "learning_rate": 1.4380023887748209e-05, + "loss": 0.9178, + "step": 4137 + }, + { + "epoch": 0.69, + "learning_rate": 1.4375142133620097e-05, + "loss": 0.9257, + "step": 4138 + }, + { + "epoch": 0.69, + "learning_rate": 1.437025908965436e-05, + "loss": 0.9621, + "step": 4139 + }, + { + "epoch": 0.69, + "learning_rate": 1.4365374757290562e-05, + "loss": 0.9395, + "step": 4140 + }, + { + "epoch": 0.69, + "learning_rate": 1.4360489137968668e-05, + "loss": 0.8673, + "step": 4141 + }, + { + "epoch": 0.69, + "learning_rate": 1.4355602233128996e-05, + "loss": 0.9367, + "step": 4142 + }, + { + "epoch": 0.69, + "learning_rate": 1.4350714044212262e-05, + "loss": 0.9636, + "step": 4143 + }, + { + "epoch": 0.69, + "learning_rate": 1.4345824572659556e-05, + "loss": 0.9267, + "step": 4144 + }, + { + "epoch": 0.69, + "learning_rate": 1.434093381991235e-05, + "loss": 0.9366, + "step": 4145 + }, + { + "epoch": 0.7, + "learning_rate": 1.433604178741248e-05, + "loss": 0.8732, + "step": 4146 + }, + { + "epoch": 0.7, + "learning_rate": 1.433114847660217e-05, + "loss": 0.898, + "step": 4147 + }, + { + "epoch": 0.7, + "learning_rate": 1.4326253888924027e-05, + "loss": 0.8883, + "step": 4148 + }, + { + "epoch": 0.7, + "learning_rate": 1.432135802582102e-05, + "loss": 0.9366, + "step": 4149 + }, + { + "epoch": 0.7, + "learning_rate": 1.4316460888736496e-05, + "loss": 0.9198, + "step": 4150 + }, + { + "epoch": 0.7, + "learning_rate": 1.4311562479114193e-05, + "loss": 0.8998, + "step": 4151 + }, + { + "epoch": 0.7, + "learning_rate": 1.430666279839821e-05, + "loss": 0.8368, + "step": 4152 + }, + { + "epoch": 0.7, + "learning_rate": 1.4301761848033018e-05, + "loss": 0.8923, + "step": 4153 + }, + { + "epoch": 0.7, + "learning_rate": 1.4296859629463473e-05, + "loss": 0.9292, + "step": 4154 + }, + { + "epoch": 0.7, + "learning_rate": 1.4291956144134802e-05, + "loss": 0.948, + "step": 4155 + }, + { + "epoch": 0.7, + "learning_rate": 1.4287051393492601e-05, + "loss": 0.8997, + "step": 4156 + }, + { + "epoch": 0.7, + "learning_rate": 1.4282145378982844e-05, + "loss": 0.9488, + "step": 4157 + }, + { + "epoch": 0.7, + "learning_rate": 1.4277238102051871e-05, + "loss": 0.9373, + "step": 4158 + }, + { + "epoch": 0.7, + "learning_rate": 1.4272329564146403e-05, + "loss": 0.9496, + "step": 4159 + }, + { + "epoch": 0.7, + "learning_rate": 1.4267419766713528e-05, + "loss": 0.8592, + "step": 4160 + }, + { + "epoch": 0.7, + "learning_rate": 1.4262508711200702e-05, + "loss": 0.8985, + "step": 4161 + }, + { + "epoch": 0.7, + "learning_rate": 1.4257596399055757e-05, + "loss": 0.8792, + "step": 4162 + }, + { + "epoch": 0.7, + "learning_rate": 1.4252682831726897e-05, + "loss": 0.8697, + "step": 4163 + }, + { + "epoch": 0.7, + "learning_rate": 1.424776801066269e-05, + "loss": 0.8949, + "step": 4164 + }, + { + "epoch": 0.7, + "learning_rate": 1.4242851937312078e-05, + "loss": 0.9411, + "step": 4165 + }, + { + "epoch": 0.7, + "learning_rate": 1.423793461312437e-05, + "loss": 0.8736, 
+ "step": 4166 + }, + { + "epoch": 0.7, + "learning_rate": 1.4233016039549243e-05, + "loss": 0.9207, + "step": 4167 + }, + { + "epoch": 0.7, + "learning_rate": 1.4228096218036746e-05, + "loss": 0.9112, + "step": 4168 + }, + { + "epoch": 0.7, + "learning_rate": 1.4223175150037297e-05, + "loss": 0.9261, + "step": 4169 + }, + { + "epoch": 0.7, + "learning_rate": 1.421825283700167e-05, + "loss": 0.9674, + "step": 4170 + }, + { + "epoch": 0.7, + "learning_rate": 1.421332928038102e-05, + "loss": 0.8928, + "step": 4171 + }, + { + "epoch": 0.7, + "learning_rate": 1.4208404481626863e-05, + "loss": 0.892, + "step": 4172 + }, + { + "epoch": 0.7, + "learning_rate": 1.420347844219108e-05, + "loss": 0.9968, + "step": 4173 + }, + { + "epoch": 0.7, + "learning_rate": 1.4198551163525918e-05, + "loss": 0.9343, + "step": 4174 + }, + { + "epoch": 0.7, + "learning_rate": 1.4193622647083994e-05, + "loss": 0.9363, + "step": 4175 + }, + { + "epoch": 0.7, + "learning_rate": 1.4188692894318279e-05, + "loss": 0.9462, + "step": 4176 + }, + { + "epoch": 0.7, + "learning_rate": 1.4183761906682125e-05, + "loss": 0.8944, + "step": 4177 + }, + { + "epoch": 0.7, + "learning_rate": 1.417882968562923e-05, + "loss": 0.933, + "step": 4178 + }, + { + "epoch": 0.7, + "learning_rate": 1.417389623261367e-05, + "loss": 0.9053, + "step": 4179 + }, + { + "epoch": 0.7, + "learning_rate": 1.4168961549089875e-05, + "loss": 0.8624, + "step": 4180 + }, + { + "epoch": 0.7, + "learning_rate": 1.4164025636512644e-05, + "loss": 0.897, + "step": 4181 + }, + { + "epoch": 0.7, + "learning_rate": 1.4159088496337133e-05, + "loss": 0.887, + "step": 4182 + }, + { + "epoch": 0.7, + "learning_rate": 1.4154150130018867e-05, + "loss": 0.8922, + "step": 4183 + }, + { + "epoch": 0.7, + "learning_rate": 1.414921053901372e-05, + "loss": 0.9152, + "step": 4184 + }, + { + "epoch": 0.7, + "learning_rate": 1.4144269724777946e-05, + "loss": 0.9279, + "step": 4185 + }, + { + "epoch": 0.7, + "learning_rate": 1.4139327688768139e-05, + "loss": 0.8919, + "step": 4186 + }, + { + "epoch": 0.7, + "learning_rate": 1.4134384432441266e-05, + "loss": 0.7794, + "step": 4187 + }, + { + "epoch": 0.7, + "learning_rate": 1.4129439957254655e-05, + "loss": 0.3758, + "step": 4188 + }, + { + "epoch": 0.7, + "learning_rate": 1.4124494264665982e-05, + "loss": 0.9297, + "step": 4189 + }, + { + "epoch": 0.7, + "learning_rate": 1.4119547356133291e-05, + "loss": 0.8762, + "step": 4190 + }, + { + "epoch": 0.7, + "learning_rate": 1.4114599233114988e-05, + "loss": 0.8213, + "step": 4191 + }, + { + "epoch": 0.7, + "learning_rate": 1.4109649897069826e-05, + "loss": 0.9378, + "step": 4192 + }, + { + "epoch": 0.7, + "learning_rate": 1.4104699349456918e-05, + "loss": 0.9858, + "step": 4193 + }, + { + "epoch": 0.7, + "learning_rate": 1.4099747591735742e-05, + "loss": 0.9069, + "step": 4194 + }, + { + "epoch": 0.7, + "learning_rate": 1.4094794625366129e-05, + "loss": 0.8866, + "step": 4195 + }, + { + "epoch": 0.7, + "learning_rate": 1.4089840451808261e-05, + "loss": 0.9204, + "step": 4196 + }, + { + "epoch": 0.7, + "learning_rate": 1.4084885072522683e-05, + "loss": 0.8895, + "step": 4197 + }, + { + "epoch": 0.7, + "learning_rate": 1.4079928488970291e-05, + "loss": 0.8837, + "step": 4198 + }, + { + "epoch": 0.7, + "learning_rate": 1.407497070261234e-05, + "loss": 0.8991, + "step": 4199 + }, + { + "epoch": 0.7, + "learning_rate": 1.4070011714910437e-05, + "loss": 0.8958, + "step": 4200 + }, + { + "epoch": 0.7, + "learning_rate": 1.4065051527326539e-05, + "loss": 0.9576, + "step": 4201 + }, + { + 
"epoch": 0.7, + "learning_rate": 1.4060090141322969e-05, + "loss": 0.3944, + "step": 4202 + }, + { + "epoch": 0.7, + "learning_rate": 1.4055127558362385e-05, + "loss": 1.0009, + "step": 4203 + }, + { + "epoch": 0.7, + "learning_rate": 1.4050163779907817e-05, + "loss": 0.922, + "step": 4204 + }, + { + "epoch": 0.7, + "learning_rate": 1.4045198807422632e-05, + "loss": 0.8905, + "step": 4205 + }, + { + "epoch": 0.71, + "learning_rate": 1.4040232642370569e-05, + "loss": 0.9082, + "step": 4206 + }, + { + "epoch": 0.71, + "learning_rate": 1.4035265286215687e-05, + "loss": 0.9163, + "step": 4207 + }, + { + "epoch": 0.71, + "learning_rate": 1.4030296740422427e-05, + "loss": 0.9213, + "step": 4208 + }, + { + "epoch": 0.71, + "learning_rate": 1.4025327006455563e-05, + "loss": 0.9162, + "step": 4209 + }, + { + "epoch": 0.71, + "learning_rate": 1.402035608578023e-05, + "loss": 0.9169, + "step": 4210 + }, + { + "epoch": 0.71, + "learning_rate": 1.4015383979861898e-05, + "loss": 0.9048, + "step": 4211 + }, + { + "epoch": 0.71, + "learning_rate": 1.4010410690166406e-05, + "loss": 0.8507, + "step": 4212 + }, + { + "epoch": 0.71, + "learning_rate": 1.4005436218159927e-05, + "loss": 0.8774, + "step": 4213 + }, + { + "epoch": 0.71, + "learning_rate": 1.4000460565308984e-05, + "loss": 0.8826, + "step": 4214 + }, + { + "epoch": 0.71, + "learning_rate": 1.3995483733080457e-05, + "loss": 0.9654, + "step": 4215 + }, + { + "epoch": 0.71, + "learning_rate": 1.3990505722941565e-05, + "loss": 0.9259, + "step": 4216 + }, + { + "epoch": 0.71, + "learning_rate": 1.3985526536359881e-05, + "loss": 0.8904, + "step": 4217 + }, + { + "epoch": 0.71, + "learning_rate": 1.3980546174803318e-05, + "loss": 0.9072, + "step": 4218 + }, + { + "epoch": 0.71, + "learning_rate": 1.3975564639740138e-05, + "loss": 0.8697, + "step": 4219 + }, + { + "epoch": 0.71, + "learning_rate": 1.3970581932638951e-05, + "loss": 0.9355, + "step": 4220 + }, + { + "epoch": 0.71, + "learning_rate": 1.3965598054968715e-05, + "loss": 0.9289, + "step": 4221 + }, + { + "epoch": 0.71, + "learning_rate": 1.3960613008198722e-05, + "loss": 0.9715, + "step": 4222 + }, + { + "epoch": 0.71, + "learning_rate": 1.395562679379862e-05, + "loss": 0.9723, + "step": 4223 + }, + { + "epoch": 0.71, + "learning_rate": 1.3950639413238394e-05, + "loss": 0.898, + "step": 4224 + }, + { + "epoch": 0.71, + "learning_rate": 1.3945650867988382e-05, + "loss": 0.9348, + "step": 4225 + }, + { + "epoch": 0.71, + "learning_rate": 1.3940661159519255e-05, + "loss": 0.8832, + "step": 4226 + }, + { + "epoch": 0.71, + "learning_rate": 1.393567028930203e-05, + "loss": 0.4267, + "step": 4227 + }, + { + "epoch": 0.71, + "learning_rate": 1.393067825880807e-05, + "loss": 0.8888, + "step": 4228 + }, + { + "epoch": 0.71, + "learning_rate": 1.3925685069509079e-05, + "loss": 0.9216, + "step": 4229 + }, + { + "epoch": 0.71, + "learning_rate": 1.3920690722877099e-05, + "loss": 0.9762, + "step": 4230 + }, + { + "epoch": 0.71, + "learning_rate": 1.3915695220384514e-05, + "loss": 0.8969, + "step": 4231 + }, + { + "epoch": 0.71, + "learning_rate": 1.3910698563504057e-05, + "loss": 0.9715, + "step": 4232 + }, + { + "epoch": 0.71, + "learning_rate": 1.3905700753708786e-05, + "loss": 0.9196, + "step": 4233 + }, + { + "epoch": 0.71, + "learning_rate": 1.3900701792472113e-05, + "loss": 0.952, + "step": 4234 + }, + { + "epoch": 0.71, + "learning_rate": 1.3895701681267784e-05, + "loss": 0.8888, + "step": 4235 + }, + { + "epoch": 0.71, + "learning_rate": 1.3890700421569882e-05, + "loss": 0.8949, + "step": 4236 + }, + 
{ + "epoch": 0.71, + "learning_rate": 1.3885698014852832e-05, + "loss": 0.9393, + "step": 4237 + }, + { + "epoch": 0.71, + "learning_rate": 1.3880694462591397e-05, + "loss": 0.9682, + "step": 4238 + }, + { + "epoch": 0.71, + "learning_rate": 1.3875689766260671e-05, + "loss": 0.9365, + "step": 4239 + }, + { + "epoch": 0.71, + "learning_rate": 1.3870683927336097e-05, + "loss": 0.9234, + "step": 4240 + }, + { + "epoch": 0.71, + "learning_rate": 1.3865676947293445e-05, + "loss": 0.8865, + "step": 4241 + }, + { + "epoch": 0.71, + "learning_rate": 1.3860668827608827e-05, + "loss": 0.9325, + "step": 4242 + }, + { + "epoch": 0.71, + "learning_rate": 1.3855659569758689e-05, + "loss": 0.9537, + "step": 4243 + }, + { + "epoch": 0.71, + "learning_rate": 1.385064917521981e-05, + "loss": 0.9135, + "step": 4244 + }, + { + "epoch": 0.71, + "learning_rate": 1.384563764546931e-05, + "loss": 0.9359, + "step": 4245 + }, + { + "epoch": 0.71, + "learning_rate": 1.384062498198464e-05, + "loss": 0.9415, + "step": 4246 + }, + { + "epoch": 0.71, + "learning_rate": 1.383561118624358e-05, + "loss": 0.8646, + "step": 4247 + }, + { + "epoch": 0.71, + "learning_rate": 1.3830596259724257e-05, + "loss": 0.8833, + "step": 4248 + }, + { + "epoch": 0.71, + "learning_rate": 1.3825580203905122e-05, + "loss": 0.897, + "step": 4249 + }, + { + "epoch": 0.71, + "learning_rate": 1.3820563020264958e-05, + "loss": 0.8507, + "step": 4250 + }, + { + "epoch": 0.71, + "learning_rate": 1.3815544710282885e-05, + "loss": 0.8571, + "step": 4251 + }, + { + "epoch": 0.71, + "learning_rate": 1.3810525275438353e-05, + "loss": 0.8414, + "step": 4252 + }, + { + "epoch": 0.71, + "learning_rate": 1.3805504717211143e-05, + "loss": 0.9082, + "step": 4253 + }, + { + "epoch": 0.71, + "learning_rate": 1.3800483037081369e-05, + "loss": 0.875, + "step": 4254 + }, + { + "epoch": 0.71, + "learning_rate": 1.3795460236529472e-05, + "loss": 0.8988, + "step": 4255 + }, + { + "epoch": 0.71, + "learning_rate": 1.379043631703623e-05, + "loss": 0.9047, + "step": 4256 + }, + { + "epoch": 0.71, + "learning_rate": 1.3785411280082747e-05, + "loss": 0.8672, + "step": 4257 + }, + { + "epoch": 0.71, + "learning_rate": 1.3780385127150454e-05, + "loss": 0.9226, + "step": 4258 + }, + { + "epoch": 0.71, + "learning_rate": 1.3775357859721112e-05, + "loss": 0.9621, + "step": 4259 + }, + { + "epoch": 0.71, + "learning_rate": 1.3770329479276815e-05, + "loss": 0.8995, + "step": 4260 + }, + { + "epoch": 0.71, + "learning_rate": 1.3765299987299985e-05, + "loss": 0.825, + "step": 4261 + }, + { + "epoch": 0.71, + "learning_rate": 1.376026938527336e-05, + "loss": 0.937, + "step": 4262 + }, + { + "epoch": 0.71, + "learning_rate": 1.3755237674680024e-05, + "loss": 0.9221, + "step": 4263 + }, + { + "epoch": 0.71, + "learning_rate": 1.375020485700337e-05, + "loss": 0.418, + "step": 4264 + }, + { + "epoch": 0.72, + "learning_rate": 1.374517093372713e-05, + "loss": 0.8512, + "step": 4265 + }, + { + "epoch": 0.72, + "learning_rate": 1.3740135906335355e-05, + "loss": 0.9363, + "step": 4266 + }, + { + "epoch": 0.72, + "learning_rate": 1.3735099776312426e-05, + "loss": 0.962, + "step": 4267 + }, + { + "epoch": 0.72, + "learning_rate": 1.3730062545143041e-05, + "loss": 0.8808, + "step": 4268 + }, + { + "epoch": 0.72, + "learning_rate": 1.3725024214312237e-05, + "loss": 0.9497, + "step": 4269 + }, + { + "epoch": 0.72, + "learning_rate": 1.3719984785305359e-05, + "loss": 0.9086, + "step": 4270 + }, + { + "epoch": 0.72, + "learning_rate": 1.3714944259608087e-05, + "loss": 0.9155, + "step": 4271 + }, 
+ { + "epoch": 0.72, + "learning_rate": 1.3709902638706418e-05, + "loss": 0.3825, + "step": 4272 + }, + { + "epoch": 0.72, + "learning_rate": 1.370485992408668e-05, + "loss": 0.411, + "step": 4273 + }, + { + "epoch": 0.72, + "learning_rate": 1.3699816117235512e-05, + "loss": 0.3918, + "step": 4274 + }, + { + "epoch": 0.72, + "learning_rate": 1.3694771219639886e-05, + "loss": 0.9045, + "step": 4275 + }, + { + "epoch": 0.72, + "learning_rate": 1.3689725232787087e-05, + "loss": 0.8572, + "step": 4276 + }, + { + "epoch": 0.72, + "learning_rate": 1.3684678158164725e-05, + "loss": 0.9638, + "step": 4277 + }, + { + "epoch": 0.72, + "learning_rate": 1.367962999726073e-05, + "loss": 0.993, + "step": 4278 + }, + { + "epoch": 0.72, + "learning_rate": 1.3674580751563357e-05, + "loss": 0.9382, + "step": 4279 + }, + { + "epoch": 0.72, + "learning_rate": 1.366953042256117e-05, + "loss": 0.9281, + "step": 4280 + }, + { + "epoch": 0.72, + "learning_rate": 1.3664479011743063e-05, + "loss": 0.8248, + "step": 4281 + }, + { + "epoch": 0.72, + "learning_rate": 1.3659426520598245e-05, + "loss": 0.9354, + "step": 4282 + }, + { + "epoch": 0.72, + "learning_rate": 1.365437295061624e-05, + "loss": 0.9389, + "step": 4283 + }, + { + "epoch": 0.72, + "learning_rate": 1.3649318303286893e-05, + "loss": 0.9343, + "step": 4284 + }, + { + "epoch": 0.72, + "learning_rate": 1.3644262580100372e-05, + "loss": 0.9138, + "step": 4285 + }, + { + "epoch": 0.72, + "learning_rate": 1.3639205782547155e-05, + "loss": 0.8894, + "step": 4286 + }, + { + "epoch": 0.72, + "learning_rate": 1.3634147912118035e-05, + "loss": 0.9582, + "step": 4287 + }, + { + "epoch": 0.72, + "learning_rate": 1.3629088970304127e-05, + "loss": 0.902, + "step": 4288 + }, + { + "epoch": 0.72, + "learning_rate": 1.3624028958596861e-05, + "loss": 0.8511, + "step": 4289 + }, + { + "epoch": 0.72, + "learning_rate": 1.3618967878487983e-05, + "loss": 0.4023, + "step": 4290 + }, + { + "epoch": 0.72, + "learning_rate": 1.361390573146955e-05, + "loss": 0.8886, + "step": 4291 + }, + { + "epoch": 0.72, + "learning_rate": 1.3608842519033934e-05, + "loss": 1.0119, + "step": 4292 + }, + { + "epoch": 0.72, + "learning_rate": 1.3603778242673826e-05, + "loss": 0.9525, + "step": 4293 + }, + { + "epoch": 0.72, + "learning_rate": 1.359871290388223e-05, + "loss": 0.9066, + "step": 4294 + }, + { + "epoch": 0.72, + "learning_rate": 1.359364650415245e-05, + "loss": 0.3854, + "step": 4295 + }, + { + "epoch": 0.72, + "learning_rate": 1.3588579044978127e-05, + "loss": 0.9167, + "step": 4296 + }, + { + "epoch": 0.72, + "learning_rate": 1.3583510527853193e-05, + "loss": 0.9636, + "step": 4297 + }, + { + "epoch": 0.72, + "learning_rate": 1.3578440954271904e-05, + "loss": 0.8791, + "step": 4298 + }, + { + "epoch": 0.72, + "learning_rate": 1.3573370325728818e-05, + "loss": 0.894, + "step": 4299 + }, + { + "epoch": 0.72, + "learning_rate": 1.3568298643718815e-05, + "loss": 0.9524, + "step": 4300 + }, + { + "epoch": 0.72, + "learning_rate": 1.3563225909737076e-05, + "loss": 0.9032, + "step": 4301 + }, + { + "epoch": 0.72, + "learning_rate": 1.3558152125279099e-05, + "loss": 0.9385, + "step": 4302 + }, + { + "epoch": 0.72, + "learning_rate": 1.3553077291840687e-05, + "loss": 0.9559, + "step": 4303 + }, + { + "epoch": 0.72, + "learning_rate": 1.3548001410917949e-05, + "loss": 0.8525, + "step": 4304 + }, + { + "epoch": 0.72, + "learning_rate": 1.3542924484007317e-05, + "loss": 0.9175, + "step": 4305 + }, + { + "epoch": 0.72, + "learning_rate": 1.3537846512605515e-05, + "loss": 0.8876, + "step": 4306 
+ }, + { + "epoch": 0.72, + "learning_rate": 1.3532767498209589e-05, + "loss": 0.8536, + "step": 4307 + }, + { + "epoch": 0.72, + "learning_rate": 1.3527687442316877e-05, + "loss": 0.9124, + "step": 4308 + }, + { + "epoch": 0.72, + "learning_rate": 1.352260634642504e-05, + "loss": 0.9588, + "step": 4309 + }, + { + "epoch": 0.72, + "learning_rate": 1.3517524212032033e-05, + "loss": 0.9044, + "step": 4310 + }, + { + "epoch": 0.72, + "learning_rate": 1.3512441040636126e-05, + "loss": 0.9083, + "step": 4311 + }, + { + "epoch": 0.72, + "learning_rate": 1.3507356833735887e-05, + "loss": 0.9729, + "step": 4312 + }, + { + "epoch": 0.72, + "learning_rate": 1.3502271592830198e-05, + "loss": 0.9473, + "step": 4313 + }, + { + "epoch": 0.72, + "learning_rate": 1.3497185319418238e-05, + "loss": 0.935, + "step": 4314 + }, + { + "epoch": 0.72, + "learning_rate": 1.3492098014999497e-05, + "loss": 0.9204, + "step": 4315 + }, + { + "epoch": 0.72, + "learning_rate": 1.3487009681073758e-05, + "loss": 0.8797, + "step": 4316 + }, + { + "epoch": 0.72, + "learning_rate": 1.3481920319141123e-05, + "loss": 0.9686, + "step": 4317 + }, + { + "epoch": 0.72, + "learning_rate": 1.3476829930701986e-05, + "loss": 0.9989, + "step": 4318 + }, + { + "epoch": 0.72, + "learning_rate": 1.347173851725705e-05, + "loss": 0.8883, + "step": 4319 + }, + { + "epoch": 0.72, + "learning_rate": 1.3466646080307309e-05, + "loss": 0.9444, + "step": 4320 + }, + { + "epoch": 0.72, + "learning_rate": 1.3461552621354073e-05, + "loss": 0.9653, + "step": 4321 + }, + { + "epoch": 0.72, + "learning_rate": 1.3456458141898946e-05, + "loss": 0.9293, + "step": 4322 + }, + { + "epoch": 0.72, + "learning_rate": 1.3451362643443832e-05, + "loss": 0.9118, + "step": 4323 + }, + { + "epoch": 0.72, + "learning_rate": 1.344626612749094e-05, + "loss": 0.9093, + "step": 4324 + }, + { + "epoch": 0.73, + "learning_rate": 1.3441168595542773e-05, + "loss": 0.9377, + "step": 4325 + }, + { + "epoch": 0.73, + "learning_rate": 1.3436070049102139e-05, + "loss": 0.9103, + "step": 4326 + }, + { + "epoch": 0.73, + "learning_rate": 1.343097048967214e-05, + "loss": 0.8467, + "step": 4327 + }, + { + "epoch": 0.73, + "learning_rate": 1.3425869918756182e-05, + "loss": 0.8705, + "step": 4328 + }, + { + "epoch": 0.73, + "learning_rate": 1.3420768337857967e-05, + "loss": 0.9194, + "step": 4329 + }, + { + "epoch": 0.73, + "learning_rate": 1.3415665748481494e-05, + "loss": 0.8466, + "step": 4330 + }, + { + "epoch": 0.73, + "learning_rate": 1.3410562152131059e-05, + "loss": 0.9015, + "step": 4331 + }, + { + "epoch": 0.73, + "learning_rate": 1.3405457550311256e-05, + "loss": 0.9299, + "step": 4332 + }, + { + "epoch": 0.73, + "learning_rate": 1.340035194452697e-05, + "loss": 0.8896, + "step": 4333 + }, + { + "epoch": 0.73, + "learning_rate": 1.3395245336283398e-05, + "loss": 0.9948, + "step": 4334 + }, + { + "epoch": 0.73, + "learning_rate": 1.339013772708601e-05, + "loss": 0.9186, + "step": 4335 + }, + { + "epoch": 0.73, + "learning_rate": 1.338502911844059e-05, + "loss": 0.9109, + "step": 4336 + }, + { + "epoch": 0.73, + "learning_rate": 1.3379919511853205e-05, + "loss": 0.8647, + "step": 4337 + }, + { + "epoch": 0.73, + "learning_rate": 1.3374808908830226e-05, + "loss": 0.9416, + "step": 4338 + }, + { + "epoch": 0.73, + "learning_rate": 1.3369697310878305e-05, + "loss": 0.9105, + "step": 4339 + }, + { + "epoch": 0.73, + "learning_rate": 1.33645847195044e-05, + "loss": 0.9513, + "step": 4340 + }, + { + "epoch": 0.73, + "learning_rate": 1.335947113621575e-05, + "loss": 0.9208, + "step": 
4341 + }, + { + "epoch": 0.73, + "learning_rate": 1.3354356562519901e-05, + "loss": 0.8686, + "step": 4342 + }, + { + "epoch": 0.73, + "learning_rate": 1.3349240999924678e-05, + "loss": 0.8881, + "step": 4343 + }, + { + "epoch": 0.73, + "learning_rate": 1.3344124449938205e-05, + "loss": 0.9223, + "step": 4344 + }, + { + "epoch": 0.73, + "learning_rate": 1.333900691406889e-05, + "loss": 0.8525, + "step": 4345 + }, + { + "epoch": 0.73, + "learning_rate": 1.3333888393825438e-05, + "loss": 0.916, + "step": 4346 + }, + { + "epoch": 0.73, + "learning_rate": 1.3328768890716846e-05, + "loss": 0.9328, + "step": 4347 + }, + { + "epoch": 0.73, + "learning_rate": 1.3323648406252392e-05, + "loss": 0.9196, + "step": 4348 + }, + { + "epoch": 0.73, + "learning_rate": 1.331852694194165e-05, + "loss": 0.9868, + "step": 4349 + }, + { + "epoch": 0.73, + "learning_rate": 1.3313404499294482e-05, + "loss": 0.9141, + "step": 4350 + }, + { + "epoch": 0.73, + "learning_rate": 1.3308281079821036e-05, + "loss": 0.8962, + "step": 4351 + }, + { + "epoch": 0.73, + "learning_rate": 1.3303156685031753e-05, + "loss": 0.8645, + "step": 4352 + }, + { + "epoch": 0.73, + "learning_rate": 1.3298031316437354e-05, + "loss": 0.8806, + "step": 4353 + }, + { + "epoch": 0.73, + "learning_rate": 1.3292904975548852e-05, + "loss": 0.9202, + "step": 4354 + }, + { + "epoch": 0.73, + "learning_rate": 1.3287777663877552e-05, + "loss": 0.9579, + "step": 4355 + }, + { + "epoch": 0.73, + "learning_rate": 1.3282649382935028e-05, + "loss": 0.9433, + "step": 4356 + }, + { + "epoch": 0.73, + "learning_rate": 1.3277520134233161e-05, + "loss": 0.9513, + "step": 4357 + }, + { + "epoch": 0.73, + "learning_rate": 1.32723899192841e-05, + "loss": 0.9468, + "step": 4358 + }, + { + "epoch": 0.73, + "learning_rate": 1.3267258739600293e-05, + "loss": 0.9247, + "step": 4359 + }, + { + "epoch": 0.73, + "learning_rate": 1.3262126596694458e-05, + "loss": 0.9044, + "step": 4360 + }, + { + "epoch": 0.73, + "learning_rate": 1.3256993492079606e-05, + "loss": 0.9517, + "step": 4361 + }, + { + "epoch": 0.73, + "learning_rate": 1.3251859427269037e-05, + "loss": 0.933, + "step": 4362 + }, + { + "epoch": 0.73, + "learning_rate": 1.3246724403776319e-05, + "loss": 0.8996, + "step": 4363 + }, + { + "epoch": 0.73, + "learning_rate": 1.324158842311531e-05, + "loss": 0.9028, + "step": 4364 + }, + { + "epoch": 0.73, + "learning_rate": 1.3236451486800157e-05, + "loss": 0.9242, + "step": 4365 + }, + { + "epoch": 0.73, + "learning_rate": 1.3231313596345282e-05, + "loss": 0.8941, + "step": 4366 + }, + { + "epoch": 0.73, + "learning_rate": 1.3226174753265381e-05, + "loss": 0.9247, + "step": 4367 + }, + { + "epoch": 0.73, + "learning_rate": 1.3221034959075447e-05, + "loss": 0.8376, + "step": 4368 + }, + { + "epoch": 0.73, + "learning_rate": 1.321589421529074e-05, + "loss": 0.9214, + "step": 4369 + }, + { + "epoch": 0.73, + "learning_rate": 1.3210752523426811e-05, + "loss": 0.9569, + "step": 4370 + }, + { + "epoch": 0.73, + "learning_rate": 1.3205609884999477e-05, + "loss": 0.9084, + "step": 4371 + }, + { + "epoch": 0.73, + "learning_rate": 1.3200466301524848e-05, + "loss": 0.9441, + "step": 4372 + }, + { + "epoch": 0.73, + "learning_rate": 1.3195321774519298e-05, + "loss": 0.9149, + "step": 4373 + }, + { + "epoch": 0.73, + "learning_rate": 1.3190176305499497e-05, + "loss": 0.9299, + "step": 4374 + }, + { + "epoch": 0.73, + "learning_rate": 1.3185029895982377e-05, + "loss": 0.917, + "step": 4375 + }, + { + "epoch": 0.73, + "learning_rate": 1.3179882547485156e-05, + "loss": 0.9337, + 
"step": 4376 + }, + { + "epoch": 0.73, + "learning_rate": 1.3174734261525321e-05, + "loss": 0.9471, + "step": 4377 + }, + { + "epoch": 0.73, + "learning_rate": 1.3169585039620651e-05, + "loss": 0.9037, + "step": 4378 + }, + { + "epoch": 0.73, + "learning_rate": 1.3164434883289178e-05, + "loss": 0.8847, + "step": 4379 + }, + { + "epoch": 0.73, + "learning_rate": 1.3159283794049234e-05, + "loss": 0.8631, + "step": 4380 + }, + { + "epoch": 0.73, + "learning_rate": 1.31541317734194e-05, + "loss": 0.9757, + "step": 4381 + }, + { + "epoch": 0.73, + "learning_rate": 1.3148978822918562e-05, + "loss": 0.8552, + "step": 4382 + }, + { + "epoch": 0.73, + "learning_rate": 1.3143824944065848e-05, + "loss": 0.9023, + "step": 4383 + }, + { + "epoch": 0.73, + "learning_rate": 1.3138670138380687e-05, + "loss": 0.8888, + "step": 4384 + }, + { + "epoch": 0.74, + "learning_rate": 1.3133514407382763e-05, + "loss": 0.4013, + "step": 4385 + }, + { + "epoch": 0.74, + "learning_rate": 1.312835775259204e-05, + "loss": 0.36, + "step": 4386 + }, + { + "epoch": 0.74, + "learning_rate": 1.3123200175528756e-05, + "loss": 0.3889, + "step": 4387 + }, + { + "epoch": 0.74, + "learning_rate": 1.311804167771342e-05, + "loss": 0.9698, + "step": 4388 + }, + { + "epoch": 0.74, + "learning_rate": 1.3112882260666805e-05, + "loss": 0.9062, + "step": 4389 + }, + { + "epoch": 0.74, + "learning_rate": 1.3107721925909967e-05, + "loss": 0.8346, + "step": 4390 + }, + { + "epoch": 0.74, + "learning_rate": 1.3102560674964226e-05, + "loss": 0.8887, + "step": 4391 + }, + { + "epoch": 0.74, + "learning_rate": 1.309739850935117e-05, + "loss": 0.8882, + "step": 4392 + }, + { + "epoch": 0.74, + "learning_rate": 1.3092235430592659e-05, + "loss": 0.9403, + "step": 4393 + }, + { + "epoch": 0.74, + "learning_rate": 1.3087071440210823e-05, + "loss": 0.8771, + "step": 4394 + }, + { + "epoch": 0.74, + "learning_rate": 1.3081906539728066e-05, + "loss": 0.9275, + "step": 4395 + }, + { + "epoch": 0.74, + "learning_rate": 1.3076740730667048e-05, + "loss": 0.9417, + "step": 4396 + }, + { + "epoch": 0.74, + "learning_rate": 1.3071574014550706e-05, + "loss": 0.904, + "step": 4397 + }, + { + "epoch": 0.74, + "learning_rate": 1.3066406392902239e-05, + "loss": 0.883, + "step": 4398 + }, + { + "epoch": 0.74, + "learning_rate": 1.306123786724512e-05, + "loss": 0.8862, + "step": 4399 + }, + { + "epoch": 0.74, + "learning_rate": 1.3056068439103084e-05, + "loss": 0.9551, + "step": 4400 + }, + { + "epoch": 0.74, + "learning_rate": 1.3050898110000132e-05, + "loss": 0.8759, + "step": 4401 + }, + { + "epoch": 0.74, + "learning_rate": 1.3045726881460526e-05, + "loss": 0.8258, + "step": 4402 + }, + { + "epoch": 0.74, + "learning_rate": 1.3040554755008805e-05, + "loss": 0.8675, + "step": 4403 + }, + { + "epoch": 0.74, + "learning_rate": 1.3035381732169762e-05, + "loss": 0.3693, + "step": 4404 + }, + { + "epoch": 0.74, + "learning_rate": 1.3030207814468466e-05, + "loss": 0.8684, + "step": 4405 + }, + { + "epoch": 0.74, + "learning_rate": 1.3025033003430226e-05, + "loss": 0.9148, + "step": 4406 + }, + { + "epoch": 0.74, + "learning_rate": 1.3019857300580645e-05, + "loss": 0.9092, + "step": 4407 + }, + { + "epoch": 0.74, + "learning_rate": 1.3014680707445571e-05, + "loss": 0.8852, + "step": 4408 + }, + { + "epoch": 0.74, + "learning_rate": 1.3009503225551114e-05, + "loss": 0.9268, + "step": 4409 + }, + { + "epoch": 0.74, + "learning_rate": 1.3004324856423652e-05, + "loss": 0.9419, + "step": 4410 + }, + { + "epoch": 0.74, + "learning_rate": 1.2999145601589822e-05, + "loss": 
0.9215, + "step": 4411 + }, + { + "epoch": 0.74, + "learning_rate": 1.2993965462576523e-05, + "loss": 0.9457, + "step": 4412 + }, + { + "epoch": 0.74, + "learning_rate": 1.2988784440910913e-05, + "loss": 0.9019, + "step": 4413 + }, + { + "epoch": 0.74, + "learning_rate": 1.2983602538120415e-05, + "loss": 0.9177, + "step": 4414 + }, + { + "epoch": 0.74, + "learning_rate": 1.29784197557327e-05, + "loss": 0.8853, + "step": 4415 + }, + { + "epoch": 0.74, + "learning_rate": 1.2973236095275716e-05, + "loss": 0.8378, + "step": 4416 + }, + { + "epoch": 0.74, + "learning_rate": 1.2968051558277655e-05, + "loss": 0.8635, + "step": 4417 + }, + { + "epoch": 0.74, + "learning_rate": 1.2962866146266975e-05, + "loss": 0.8928, + "step": 4418 + }, + { + "epoch": 0.74, + "learning_rate": 1.2957679860772383e-05, + "loss": 0.8605, + "step": 4419 + }, + { + "epoch": 0.74, + "learning_rate": 1.295249270332286e-05, + "loss": 0.8878, + "step": 4420 + }, + { + "epoch": 0.74, + "learning_rate": 1.294730467544763e-05, + "loss": 0.9017, + "step": 4421 + }, + { + "epoch": 0.74, + "learning_rate": 1.2942115778676176e-05, + "loss": 0.3803, + "step": 4422 + }, + { + "epoch": 0.74, + "learning_rate": 1.293692601453824e-05, + "loss": 0.9095, + "step": 4423 + }, + { + "epoch": 0.74, + "learning_rate": 1.2931735384563821e-05, + "loss": 0.9061, + "step": 4424 + }, + { + "epoch": 0.74, + "learning_rate": 1.2926543890283168e-05, + "loss": 0.9191, + "step": 4425 + }, + { + "epoch": 0.74, + "learning_rate": 1.2921351533226789e-05, + "loss": 0.9439, + "step": 4426 + }, + { + "epoch": 0.74, + "learning_rate": 1.2916158314925444e-05, + "loss": 0.9027, + "step": 4427 + }, + { + "epoch": 0.74, + "learning_rate": 1.2910964236910152e-05, + "loss": 0.3988, + "step": 4428 + }, + { + "epoch": 0.74, + "learning_rate": 1.2905769300712176e-05, + "loss": 0.8744, + "step": 4429 + }, + { + "epoch": 0.74, + "learning_rate": 1.2900573507863041e-05, + "loss": 0.9389, + "step": 4430 + }, + { + "epoch": 0.74, + "learning_rate": 1.2895376859894519e-05, + "loss": 0.9108, + "step": 4431 + }, + { + "epoch": 0.74, + "learning_rate": 1.2890179358338642e-05, + "loss": 0.9116, + "step": 4432 + }, + { + "epoch": 0.74, + "learning_rate": 1.2884981004727676e-05, + "loss": 0.8856, + "step": 4433 + }, + { + "epoch": 0.74, + "learning_rate": 1.2879781800594162e-05, + "loss": 0.8776, + "step": 4434 + }, + { + "epoch": 0.74, + "learning_rate": 1.2874581747470874e-05, + "loss": 0.8771, + "step": 4435 + }, + { + "epoch": 0.74, + "learning_rate": 1.2869380846890842e-05, + "loss": 0.8097, + "step": 4436 + }, + { + "epoch": 0.74, + "learning_rate": 1.2864179100387349e-05, + "loss": 0.9009, + "step": 4437 + }, + { + "epoch": 0.74, + "learning_rate": 1.2858976509493915e-05, + "loss": 0.9439, + "step": 4438 + }, + { + "epoch": 0.74, + "learning_rate": 1.285377307574433e-05, + "loss": 0.9176, + "step": 4439 + }, + { + "epoch": 0.74, + "learning_rate": 1.2848568800672617e-05, + "loss": 0.903, + "step": 4440 + }, + { + "epoch": 0.74, + "learning_rate": 1.2843363685813048e-05, + "loss": 0.9382, + "step": 4441 + }, + { + "epoch": 0.74, + "learning_rate": 1.2838157732700143e-05, + "loss": 0.9485, + "step": 4442 + }, + { + "epoch": 0.74, + "learning_rate": 1.2832950942868681e-05, + "loss": 0.8766, + "step": 4443 + }, + { + "epoch": 0.75, + "learning_rate": 1.2827743317853667e-05, + "loss": 0.9757, + "step": 4444 + }, + { + "epoch": 0.75, + "learning_rate": 1.2822534859190371e-05, + "loss": 0.9351, + "step": 4445 + }, + { + "epoch": 0.75, + "learning_rate": 1.2817325568414299e-05, + 
"loss": 0.9054, + "step": 4446 + }, + { + "epoch": 0.75, + "learning_rate": 1.2812115447061203e-05, + "loss": 0.8655, + "step": 4447 + }, + { + "epoch": 0.75, + "learning_rate": 1.2806904496667081e-05, + "loss": 0.867, + "step": 4448 + }, + { + "epoch": 0.75, + "learning_rate": 1.2801692718768179e-05, + "loss": 0.9141, + "step": 4449 + }, + { + "epoch": 0.75, + "learning_rate": 1.2796480114900975e-05, + "loss": 0.8845, + "step": 4450 + }, + { + "epoch": 0.75, + "learning_rate": 1.2791266686602213e-05, + "loss": 0.3999, + "step": 4451 + }, + { + "epoch": 0.75, + "learning_rate": 1.2786052435408855e-05, + "loss": 0.891, + "step": 4452 + }, + { + "epoch": 0.75, + "learning_rate": 1.2780837362858121e-05, + "loss": 0.9187, + "step": 4453 + }, + { + "epoch": 0.75, + "learning_rate": 1.2775621470487467e-05, + "loss": 0.9091, + "step": 4454 + }, + { + "epoch": 0.75, + "learning_rate": 1.2770404759834593e-05, + "loss": 0.8989, + "step": 4455 + }, + { + "epoch": 0.75, + "learning_rate": 1.2765187232437444e-05, + "loss": 0.9073, + "step": 4456 + }, + { + "epoch": 0.75, + "learning_rate": 1.2759968889834195e-05, + "loss": 0.9234, + "step": 4457 + }, + { + "epoch": 0.75, + "learning_rate": 1.2754749733563273e-05, + "loss": 0.9075, + "step": 4458 + }, + { + "epoch": 0.75, + "learning_rate": 1.2749529765163335e-05, + "loss": 0.8657, + "step": 4459 + }, + { + "epoch": 0.75, + "learning_rate": 1.274430898617329e-05, + "loss": 0.8907, + "step": 4460 + }, + { + "epoch": 0.75, + "learning_rate": 1.2739087398132271e-05, + "loss": 0.9084, + "step": 4461 + }, + { + "epoch": 0.75, + "learning_rate": 1.273386500257966e-05, + "loss": 0.9537, + "step": 4462 + }, + { + "epoch": 0.75, + "learning_rate": 1.2728641801055068e-05, + "loss": 0.9061, + "step": 4463 + }, + { + "epoch": 0.75, + "learning_rate": 1.2723417795098358e-05, + "loss": 0.9649, + "step": 4464 + }, + { + "epoch": 0.75, + "learning_rate": 1.2718192986249618e-05, + "loss": 0.8501, + "step": 4465 + }, + { + "epoch": 0.75, + "learning_rate": 1.2712967376049177e-05, + "loss": 0.9622, + "step": 4466 + }, + { + "epoch": 0.75, + "learning_rate": 1.2707740966037596e-05, + "loss": 0.8461, + "step": 4467 + }, + { + "epoch": 0.75, + "learning_rate": 1.2702513757755681e-05, + "loss": 0.8928, + "step": 4468 + }, + { + "epoch": 0.75, + "learning_rate": 1.2697285752744462e-05, + "loss": 0.9321, + "step": 4469 + }, + { + "epoch": 0.75, + "learning_rate": 1.2692056952545214e-05, + "loss": 0.8705, + "step": 4470 + }, + { + "epoch": 0.75, + "learning_rate": 1.2686827358699435e-05, + "loss": 0.9629, + "step": 4471 + }, + { + "epoch": 0.75, + "learning_rate": 1.2681596972748876e-05, + "loss": 0.8815, + "step": 4472 + }, + { + "epoch": 0.75, + "learning_rate": 1.2676365796235497e-05, + "loss": 0.9115, + "step": 4473 + }, + { + "epoch": 0.75, + "learning_rate": 1.2671133830701512e-05, + "loss": 0.9348, + "step": 4474 + }, + { + "epoch": 0.75, + "learning_rate": 1.2665901077689352e-05, + "loss": 0.9046, + "step": 4475 + }, + { + "epoch": 0.75, + "learning_rate": 1.2660667538741696e-05, + "loss": 0.3775, + "step": 4476 + }, + { + "epoch": 0.75, + "learning_rate": 1.2655433215401438e-05, + "loss": 0.9313, + "step": 4477 + }, + { + "epoch": 0.75, + "learning_rate": 1.2650198109211718e-05, + "loss": 0.9001, + "step": 4478 + }, + { + "epoch": 0.75, + "learning_rate": 1.2644962221715893e-05, + "loss": 0.911, + "step": 4479 + }, + { + "epoch": 0.75, + "learning_rate": 1.2639725554457563e-05, + "loss": 0.892, + "step": 4480 + }, + { + "epoch": 0.75, + "learning_rate": 
1.263448810898055e-05, + "loss": 0.9388, + "step": 4481 + }, + { + "epoch": 0.75, + "learning_rate": 1.2629249886828906e-05, + "loss": 0.9462, + "step": 4482 + }, + { + "epoch": 0.75, + "learning_rate": 1.2624010889546915e-05, + "loss": 0.9619, + "step": 4483 + }, + { + "epoch": 0.75, + "learning_rate": 1.261877111867909e-05, + "loss": 0.8769, + "step": 4484 + }, + { + "epoch": 0.75, + "learning_rate": 1.2613530575770169e-05, + "loss": 0.8732, + "step": 4485 + }, + { + "epoch": 0.75, + "learning_rate": 1.2608289262365119e-05, + "loss": 0.9106, + "step": 4486 + }, + { + "epoch": 0.75, + "learning_rate": 1.2603047180009131e-05, + "loss": 0.9412, + "step": 4487 + }, + { + "epoch": 0.75, + "learning_rate": 1.259780433024763e-05, + "loss": 0.9199, + "step": 4488 + }, + { + "epoch": 0.75, + "learning_rate": 1.2592560714626262e-05, + "loss": 0.9377, + "step": 4489 + }, + { + "epoch": 0.75, + "learning_rate": 1.2587316334690898e-05, + "loss": 0.8573, + "step": 4490 + }, + { + "epoch": 0.75, + "learning_rate": 1.2582071191987637e-05, + "loss": 0.9008, + "step": 4491 + }, + { + "epoch": 0.75, + "learning_rate": 1.2576825288062801e-05, + "loss": 0.3723, + "step": 4492 + }, + { + "epoch": 0.75, + "learning_rate": 1.257157862446294e-05, + "loss": 0.9173, + "step": 4493 + }, + { + "epoch": 0.75, + "learning_rate": 1.2566331202734822e-05, + "loss": 0.9119, + "step": 4494 + }, + { + "epoch": 0.75, + "learning_rate": 1.2561083024425443e-05, + "loss": 0.8763, + "step": 4495 + }, + { + "epoch": 0.75, + "learning_rate": 1.255583409108202e-05, + "loss": 0.8687, + "step": 4496 + }, + { + "epoch": 0.75, + "learning_rate": 1.2550584404251996e-05, + "loss": 0.8897, + "step": 4497 + }, + { + "epoch": 0.75, + "learning_rate": 1.2545333965483031e-05, + "loss": 0.8912, + "step": 4498 + }, + { + "epoch": 0.75, + "learning_rate": 1.2540082776323009e-05, + "loss": 0.8951, + "step": 4499 + }, + { + "epoch": 0.75, + "learning_rate": 1.2534830838320037e-05, + "loss": 0.9328, + "step": 4500 + }, + { + "epoch": 0.75, + "learning_rate": 1.2529578153022444e-05, + "loss": 0.9035, + "step": 4501 + }, + { + "epoch": 0.75, + "learning_rate": 1.2524324721978768e-05, + "loss": 0.9056, + "step": 4502 + }, + { + "epoch": 0.75, + "learning_rate": 1.2519070546737783e-05, + "loss": 0.8789, + "step": 4503 + }, + { + "epoch": 0.76, + "learning_rate": 1.2513815628848473e-05, + "loss": 0.8897, + "step": 4504 + }, + { + "epoch": 0.76, + "learning_rate": 1.2508559969860042e-05, + "loss": 0.9409, + "step": 4505 + }, + { + "epoch": 0.76, + "learning_rate": 1.2503303571321915e-05, + "loss": 0.8962, + "step": 4506 + }, + { + "epoch": 0.76, + "learning_rate": 1.2498046434783727e-05, + "loss": 0.8592, + "step": 4507 + }, + { + "epoch": 0.76, + "learning_rate": 1.2492788561795347e-05, + "loss": 0.9034, + "step": 4508 + }, + { + "epoch": 0.76, + "learning_rate": 1.2487529953906841e-05, + "loss": 0.9305, + "step": 4509 + }, + { + "epoch": 0.76, + "learning_rate": 1.2482270612668508e-05, + "loss": 0.9931, + "step": 4510 + }, + { + "epoch": 0.76, + "learning_rate": 1.247701053963085e-05, + "loss": 0.915, + "step": 4511 + }, + { + "epoch": 0.76, + "learning_rate": 1.2471749736344601e-05, + "loss": 0.3802, + "step": 4512 + }, + { + "epoch": 0.76, + "learning_rate": 1.2466488204360694e-05, + "loss": 0.9613, + "step": 4513 + }, + { + "epoch": 0.76, + "learning_rate": 1.2461225945230289e-05, + "loss": 0.9495, + "step": 4514 + }, + { + "epoch": 0.76, + "learning_rate": 1.2455962960504744e-05, + "loss": 0.8933, + "step": 4515 + }, + { + "epoch": 0.76, + 
"learning_rate": 1.2450699251735656e-05, + "loss": 0.9028, + "step": 4516 + }, + { + "epoch": 0.76, + "learning_rate": 1.2445434820474813e-05, + "loss": 0.9082, + "step": 4517 + }, + { + "epoch": 0.76, + "learning_rate": 1.2440169668274226e-05, + "loss": 0.3625, + "step": 4518 + }, + { + "epoch": 0.76, + "learning_rate": 1.2434903796686117e-05, + "loss": 0.8801, + "step": 4519 + }, + { + "epoch": 0.76, + "learning_rate": 1.242963720726292e-05, + "loss": 0.9093, + "step": 4520 + }, + { + "epoch": 0.76, + "learning_rate": 1.242436990155728e-05, + "loss": 0.8897, + "step": 4521 + }, + { + "epoch": 0.76, + "learning_rate": 1.2419101881122055e-05, + "loss": 0.9577, + "step": 4522 + }, + { + "epoch": 0.76, + "learning_rate": 1.2413833147510312e-05, + "loss": 0.93, + "step": 4523 + }, + { + "epoch": 0.76, + "learning_rate": 1.2408563702275329e-05, + "loss": 0.8499, + "step": 4524 + }, + { + "epoch": 0.76, + "learning_rate": 1.2403293546970593e-05, + "loss": 0.8867, + "step": 4525 + }, + { + "epoch": 0.76, + "learning_rate": 1.23980226831498e-05, + "loss": 0.9001, + "step": 4526 + }, + { + "epoch": 0.76, + "learning_rate": 1.2392751112366856e-05, + "loss": 0.4242, + "step": 4527 + }, + { + "epoch": 0.76, + "learning_rate": 1.2387478836175877e-05, + "loss": 0.8685, + "step": 4528 + }, + { + "epoch": 0.76, + "learning_rate": 1.2382205856131186e-05, + "loss": 0.8602, + "step": 4529 + }, + { + "epoch": 0.76, + "learning_rate": 1.2376932173787308e-05, + "loss": 0.9015, + "step": 4530 + }, + { + "epoch": 0.76, + "learning_rate": 1.2371657790698986e-05, + "loss": 0.9444, + "step": 4531 + }, + { + "epoch": 0.76, + "learning_rate": 1.2366382708421154e-05, + "loss": 0.9032, + "step": 4532 + }, + { + "epoch": 0.76, + "learning_rate": 1.2361106928508971e-05, + "loss": 0.9215, + "step": 4533 + }, + { + "epoch": 0.76, + "learning_rate": 1.2355830452517786e-05, + "loss": 0.8758, + "step": 4534 + }, + { + "epoch": 0.76, + "learning_rate": 1.2350553282003163e-05, + "loss": 0.819, + "step": 4535 + }, + { + "epoch": 0.76, + "learning_rate": 1.2345275418520863e-05, + "loss": 0.873, + "step": 4536 + }, + { + "epoch": 0.76, + "learning_rate": 1.233999686362686e-05, + "loss": 0.8054, + "step": 4537 + }, + { + "epoch": 0.76, + "learning_rate": 1.2334717618877324e-05, + "loss": 0.9274, + "step": 4538 + }, + { + "epoch": 0.76, + "learning_rate": 1.2329437685828634e-05, + "loss": 0.9346, + "step": 4539 + }, + { + "epoch": 0.76, + "learning_rate": 1.232415706603736e-05, + "loss": 0.8759, + "step": 4540 + }, + { + "epoch": 0.76, + "learning_rate": 1.23188757610603e-05, + "loss": 0.9444, + "step": 4541 + }, + { + "epoch": 0.76, + "learning_rate": 1.2313593772454428e-05, + "loss": 0.8561, + "step": 4542 + }, + { + "epoch": 0.76, + "learning_rate": 1.2308311101776933e-05, + "loss": 0.8212, + "step": 4543 + }, + { + "epoch": 0.76, + "learning_rate": 1.2303027750585195e-05, + "loss": 0.9592, + "step": 4544 + }, + { + "epoch": 0.76, + "learning_rate": 1.2297743720436807e-05, + "loss": 0.9015, + "step": 4545 + }, + { + "epoch": 0.76, + "learning_rate": 1.2292459012889555e-05, + "loss": 0.8656, + "step": 4546 + }, + { + "epoch": 0.76, + "learning_rate": 1.2287173629501428e-05, + "loss": 0.853, + "step": 4547 + }, + { + "epoch": 0.76, + "learning_rate": 1.2281887571830609e-05, + "loss": 0.9062, + "step": 4548 + }, + { + "epoch": 0.76, + "learning_rate": 1.2276600841435485e-05, + "loss": 0.9392, + "step": 4549 + }, + { + "epoch": 0.76, + "learning_rate": 1.227131343987464e-05, + "loss": 0.9194, + "step": 4550 + }, + { + "epoch": 0.76, 
+ "learning_rate": 1.2266025368706851e-05, + "loss": 0.9974, + "step": 4551 + }, + { + "epoch": 0.76, + "learning_rate": 1.22607366294911e-05, + "loss": 0.8732, + "step": 4552 + }, + { + "epoch": 0.76, + "learning_rate": 1.225544722378656e-05, + "loss": 0.9026, + "step": 4553 + }, + { + "epoch": 0.76, + "learning_rate": 1.225015715315261e-05, + "loss": 0.9079, + "step": 4554 + }, + { + "epoch": 0.76, + "learning_rate": 1.2244866419148812e-05, + "loss": 0.3811, + "step": 4555 + }, + { + "epoch": 0.76, + "learning_rate": 1.2239575023334931e-05, + "loss": 1.0109, + "step": 4556 + }, + { + "epoch": 0.76, + "learning_rate": 1.2234282967270926e-05, + "loss": 0.8704, + "step": 4557 + }, + { + "epoch": 0.76, + "learning_rate": 1.2228990252516951e-05, + "loss": 0.8706, + "step": 4558 + }, + { + "epoch": 0.76, + "learning_rate": 1.2223696880633349e-05, + "loss": 1.0089, + "step": 4559 + }, + { + "epoch": 0.76, + "learning_rate": 1.2218402853180669e-05, + "loss": 0.8893, + "step": 4560 + }, + { + "epoch": 0.76, + "learning_rate": 1.221310817171964e-05, + "loss": 0.8853, + "step": 4561 + }, + { + "epoch": 0.76, + "learning_rate": 1.2207812837811194e-05, + "loss": 0.8965, + "step": 4562 + }, + { + "epoch": 0.76, + "learning_rate": 1.2202516853016447e-05, + "loss": 0.9014, + "step": 4563 + }, + { + "epoch": 0.77, + "learning_rate": 1.219722021889671e-05, + "loss": 0.9321, + "step": 4564 + }, + { + "epoch": 0.77, + "learning_rate": 1.2191922937013489e-05, + "loss": 0.4044, + "step": 4565 + }, + { + "epoch": 0.77, + "learning_rate": 1.2186625008928479e-05, + "loss": 0.8957, + "step": 4566 + }, + { + "epoch": 0.77, + "learning_rate": 1.218132643620356e-05, + "loss": 0.8804, + "step": 4567 + }, + { + "epoch": 0.77, + "learning_rate": 1.217602722040081e-05, + "loss": 0.872, + "step": 4568 + }, + { + "epoch": 0.77, + "learning_rate": 1.2170727363082495e-05, + "loss": 0.9045, + "step": 4569 + }, + { + "epoch": 0.77, + "learning_rate": 1.2165426865811061e-05, + "loss": 0.9139, + "step": 4570 + }, + { + "epoch": 0.77, + "learning_rate": 1.2160125730149157e-05, + "loss": 0.9582, + "step": 4571 + }, + { + "epoch": 0.77, + "learning_rate": 1.215482395765961e-05, + "loss": 0.9132, + "step": 4572 + }, + { + "epoch": 0.77, + "learning_rate": 1.2149521549905441e-05, + "loss": 0.8497, + "step": 4573 + }, + { + "epoch": 0.77, + "learning_rate": 1.2144218508449852e-05, + "loss": 0.91, + "step": 4574 + }, + { + "epoch": 0.77, + "learning_rate": 1.2138914834856237e-05, + "loss": 0.9404, + "step": 4575 + }, + { + "epoch": 0.77, + "learning_rate": 1.2133610530688169e-05, + "loss": 0.9019, + "step": 4576 + }, + { + "epoch": 0.77, + "learning_rate": 1.2128305597509422e-05, + "loss": 0.9181, + "step": 4577 + }, + { + "epoch": 0.77, + "learning_rate": 1.2123000036883939e-05, + "loss": 0.8318, + "step": 4578 + }, + { + "epoch": 0.77, + "learning_rate": 1.2117693850375858e-05, + "loss": 0.9282, + "step": 4579 + }, + { + "epoch": 0.77, + "learning_rate": 1.2112387039549492e-05, + "loss": 0.9182, + "step": 4580 + }, + { + "epoch": 0.77, + "learning_rate": 1.2107079605969351e-05, + "loss": 0.8782, + "step": 4581 + }, + { + "epoch": 0.77, + "learning_rate": 1.2101771551200121e-05, + "loss": 0.9212, + "step": 4582 + }, + { + "epoch": 0.77, + "learning_rate": 1.2096462876806672e-05, + "loss": 0.8954, + "step": 4583 + }, + { + "epoch": 0.77, + "learning_rate": 1.209115358435405e-05, + "loss": 0.9608, + "step": 4584 + }, + { + "epoch": 0.77, + "learning_rate": 1.20858436754075e-05, + "loss": 0.8937, + "step": 4585 + }, + { + "epoch": 0.77, 
+ "learning_rate": 1.2080533151532432e-05, + "loss": 0.8779, + "step": 4586 + }, + { + "epoch": 0.77, + "learning_rate": 1.2075222014294448e-05, + "loss": 0.865, + "step": 4587 + }, + { + "epoch": 0.77, + "learning_rate": 1.2069910265259323e-05, + "loss": 0.8729, + "step": 4588 + }, + { + "epoch": 0.77, + "learning_rate": 1.2064597905993018e-05, + "loss": 0.9257, + "step": 4589 + }, + { + "epoch": 0.77, + "learning_rate": 1.205928493806167e-05, + "loss": 0.8856, + "step": 4590 + }, + { + "epoch": 0.77, + "learning_rate": 1.2053971363031605e-05, + "loss": 0.9052, + "step": 4591 + }, + { + "epoch": 0.77, + "learning_rate": 1.204865718246931e-05, + "loss": 0.9644, + "step": 4592 + }, + { + "epoch": 0.77, + "learning_rate": 1.2043342397941466e-05, + "loss": 0.9208, + "step": 4593 + }, + { + "epoch": 0.77, + "learning_rate": 1.203802701101493e-05, + "loss": 0.3704, + "step": 4594 + }, + { + "epoch": 0.77, + "learning_rate": 1.2032711023256727e-05, + "loss": 0.9508, + "step": 4595 + }, + { + "epoch": 0.77, + "learning_rate": 1.2027394436234072e-05, + "loss": 0.8976, + "step": 4596 + }, + { + "epoch": 0.77, + "learning_rate": 1.2022077251514343e-05, + "loss": 0.938, + "step": 4597 + }, + { + "epoch": 0.77, + "learning_rate": 1.2016759470665112e-05, + "loss": 0.8946, + "step": 4598 + }, + { + "epoch": 0.77, + "learning_rate": 1.2011441095254109e-05, + "loss": 0.9414, + "step": 4599 + }, + { + "epoch": 0.77, + "learning_rate": 1.2006122126849247e-05, + "loss": 0.9222, + "step": 4600 + }, + { + "epoch": 0.77, + "learning_rate": 1.2000802567018613e-05, + "loss": 0.9208, + "step": 4601 + }, + { + "epoch": 0.77, + "learning_rate": 1.1995482417330473e-05, + "loss": 0.952, + "step": 4602 + }, + { + "epoch": 0.77, + "learning_rate": 1.199016167935326e-05, + "loss": 0.8737, + "step": 4603 + }, + { + "epoch": 0.77, + "learning_rate": 1.1984840354655585e-05, + "loss": 0.9234, + "step": 4604 + }, + { + "epoch": 0.77, + "learning_rate": 1.1979518444806224e-05, + "loss": 0.8674, + "step": 4605 + }, + { + "epoch": 0.77, + "learning_rate": 1.1974195951374141e-05, + "loss": 0.8813, + "step": 4606 + }, + { + "epoch": 0.77, + "learning_rate": 1.1968872875928455e-05, + "loss": 0.8982, + "step": 4607 + }, + { + "epoch": 0.77, + "learning_rate": 1.196354922003847e-05, + "loss": 0.846, + "step": 4608 + }, + { + "epoch": 0.77, + "learning_rate": 1.1958224985273648e-05, + "loss": 0.902, + "step": 4609 + }, + { + "epoch": 0.77, + "learning_rate": 1.1952900173203639e-05, + "loss": 0.9256, + "step": 4610 + }, + { + "epoch": 0.77, + "learning_rate": 1.1947574785398244e-05, + "loss": 0.9024, + "step": 4611 + }, + { + "epoch": 0.77, + "learning_rate": 1.1942248823427449e-05, + "loss": 0.8928, + "step": 4612 + }, + { + "epoch": 0.77, + "learning_rate": 1.19369222888614e-05, + "loss": 0.873, + "step": 4613 + }, + { + "epoch": 0.77, + "learning_rate": 1.1931595183270417e-05, + "loss": 0.9067, + "step": 4614 + }, + { + "epoch": 0.77, + "learning_rate": 1.1926267508224987e-05, + "loss": 0.9096, + "step": 4615 + }, + { + "epoch": 0.77, + "learning_rate": 1.1920939265295762e-05, + "loss": 0.8701, + "step": 4616 + }, + { + "epoch": 0.77, + "learning_rate": 1.1915610456053563e-05, + "loss": 0.9078, + "step": 4617 + }, + { + "epoch": 0.77, + "learning_rate": 1.1910281082069381e-05, + "loss": 0.9014, + "step": 4618 + }, + { + "epoch": 0.77, + "learning_rate": 1.1904951144914372e-05, + "loss": 0.8905, + "step": 4619 + }, + { + "epoch": 0.77, + "learning_rate": 1.1899620646159855e-05, + "loss": 0.8883, + "step": 4620 + }, + { + "epoch": 
0.77, + "learning_rate": 1.1894289587377315e-05, + "loss": 0.9536, + "step": 4621 + }, + { + "epoch": 0.77, + "learning_rate": 1.1888957970138408e-05, + "loss": 0.9444, + "step": 4622 + }, + { + "epoch": 0.78, + "learning_rate": 1.1883625796014951e-05, + "loss": 0.908, + "step": 4623 + }, + { + "epoch": 0.78, + "learning_rate": 1.187829306657892e-05, + "loss": 0.8322, + "step": 4624 + }, + { + "epoch": 0.78, + "learning_rate": 1.1872959783402461e-05, + "loss": 0.952, + "step": 4625 + }, + { + "epoch": 0.78, + "learning_rate": 1.1867625948057882e-05, + "loss": 0.8915, + "step": 4626 + }, + { + "epoch": 0.78, + "learning_rate": 1.1862291562117656e-05, + "loss": 0.4036, + "step": 4627 + }, + { + "epoch": 0.78, + "learning_rate": 1.1856956627154412e-05, + "loss": 0.8925, + "step": 4628 + }, + { + "epoch": 0.78, + "learning_rate": 1.1851621144740945e-05, + "loss": 0.4088, + "step": 4629 + }, + { + "epoch": 0.78, + "learning_rate": 1.1846285116450214e-05, + "loss": 0.9516, + "step": 4630 + }, + { + "epoch": 0.78, + "learning_rate": 1.1840948543855336e-05, + "loss": 0.9126, + "step": 4631 + }, + { + "epoch": 0.78, + "learning_rate": 1.1835611428529581e-05, + "loss": 0.9189, + "step": 4632 + }, + { + "epoch": 0.78, + "learning_rate": 1.1830273772046395e-05, + "loss": 0.9165, + "step": 4633 + }, + { + "epoch": 0.78, + "learning_rate": 1.1824935575979373e-05, + "loss": 0.8938, + "step": 4634 + }, + { + "epoch": 0.78, + "learning_rate": 1.1819596841902267e-05, + "loss": 0.9515, + "step": 4635 + }, + { + "epoch": 0.78, + "learning_rate": 1.1814257571388994e-05, + "loss": 0.8796, + "step": 4636 + }, + { + "epoch": 0.78, + "learning_rate": 1.1808917766013627e-05, + "loss": 0.8761, + "step": 4637 + }, + { + "epoch": 0.78, + "learning_rate": 1.1803577427350401e-05, + "loss": 0.8475, + "step": 4638 + }, + { + "epoch": 0.78, + "learning_rate": 1.1798236556973694e-05, + "loss": 0.9238, + "step": 4639 + }, + { + "epoch": 0.78, + "learning_rate": 1.179289515645806e-05, + "loss": 0.87, + "step": 4640 + }, + { + "epoch": 0.78, + "learning_rate": 1.1787553227378189e-05, + "loss": 0.8599, + "step": 4641 + }, + { + "epoch": 0.78, + "learning_rate": 1.1782210771308948e-05, + "loss": 0.9368, + "step": 4642 + }, + { + "epoch": 0.78, + "learning_rate": 1.1776867789825344e-05, + "loss": 0.927, + "step": 4643 + }, + { + "epoch": 0.78, + "learning_rate": 1.1771524284502544e-05, + "loss": 0.8764, + "step": 4644 + }, + { + "epoch": 0.78, + "learning_rate": 1.1766180256915867e-05, + "loss": 0.95, + "step": 4645 + }, + { + "epoch": 0.78, + "learning_rate": 1.1760835708640794e-05, + "loss": 0.8787, + "step": 4646 + }, + { + "epoch": 0.78, + "learning_rate": 1.1755490641252951e-05, + "loss": 0.9223, + "step": 4647 + }, + { + "epoch": 0.78, + "learning_rate": 1.1750145056328119e-05, + "loss": 0.8819, + "step": 4648 + }, + { + "epoch": 0.78, + "learning_rate": 1.1744798955442229e-05, + "loss": 0.892, + "step": 4649 + }, + { + "epoch": 0.78, + "learning_rate": 1.1739452340171376e-05, + "loss": 0.8856, + "step": 4650 + }, + { + "epoch": 0.78, + "learning_rate": 1.1734105212091789e-05, + "loss": 0.9355, + "step": 4651 + }, + { + "epoch": 0.78, + "learning_rate": 1.1728757572779863e-05, + "loss": 0.8903, + "step": 4652 + }, + { + "epoch": 0.78, + "learning_rate": 1.1723409423812135e-05, + "loss": 0.3742, + "step": 4653 + }, + { + "epoch": 0.78, + "learning_rate": 1.1718060766765298e-05, + "loss": 0.886, + "step": 4654 + }, + { + "epoch": 0.78, + "learning_rate": 1.171271160321619e-05, + "loss": 0.9428, + "step": 4655 + }, + { + 
"epoch": 0.78, + "learning_rate": 1.17073619347418e-05, + "loss": 0.9105, + "step": 4656 + }, + { + "epoch": 0.78, + "learning_rate": 1.1702011762919268e-05, + "loss": 0.9311, + "step": 4657 + }, + { + "epoch": 0.78, + "learning_rate": 1.1696661089325878e-05, + "loss": 0.924, + "step": 4658 + }, + { + "epoch": 0.78, + "learning_rate": 1.1691309915539069e-05, + "loss": 0.8195, + "step": 4659 + }, + { + "epoch": 0.78, + "learning_rate": 1.1685958243136418e-05, + "loss": 0.9235, + "step": 4660 + }, + { + "epoch": 0.78, + "learning_rate": 1.1680606073695652e-05, + "loss": 0.8376, + "step": 4661 + }, + { + "epoch": 0.78, + "learning_rate": 1.1675253408794656e-05, + "loss": 0.8819, + "step": 4662 + }, + { + "epoch": 0.78, + "learning_rate": 1.1669900250011444e-05, + "loss": 0.8692, + "step": 4663 + }, + { + "epoch": 0.78, + "learning_rate": 1.1664546598924184e-05, + "loss": 0.9199, + "step": 4664 + }, + { + "epoch": 0.78, + "learning_rate": 1.1659192457111193e-05, + "loss": 0.8574, + "step": 4665 + }, + { + "epoch": 0.78, + "learning_rate": 1.1653837826150918e-05, + "loss": 0.9001, + "step": 4666 + }, + { + "epoch": 0.78, + "learning_rate": 1.1648482707621972e-05, + "loss": 0.8714, + "step": 4667 + }, + { + "epoch": 0.78, + "learning_rate": 1.1643127103103094e-05, + "loss": 0.936, + "step": 4668 + }, + { + "epoch": 0.78, + "learning_rate": 1.1637771014173172e-05, + "loss": 0.8903, + "step": 4669 + }, + { + "epoch": 0.78, + "learning_rate": 1.1632414442411233e-05, + "loss": 0.3717, + "step": 4670 + }, + { + "epoch": 0.78, + "learning_rate": 1.1627057389396462e-05, + "loss": 0.8767, + "step": 4671 + }, + { + "epoch": 0.78, + "learning_rate": 1.1621699856708164e-05, + "loss": 0.3909, + "step": 4672 + }, + { + "epoch": 0.78, + "learning_rate": 1.16163418459258e-05, + "loss": 0.9017, + "step": 4673 + }, + { + "epoch": 0.78, + "learning_rate": 1.1610983358628962e-05, + "loss": 0.8844, + "step": 4674 + }, + { + "epoch": 0.78, + "learning_rate": 1.16056243963974e-05, + "loss": 0.8789, + "step": 4675 + }, + { + "epoch": 0.78, + "learning_rate": 1.1600264960810978e-05, + "loss": 0.8948, + "step": 4676 + }, + { + "epoch": 0.78, + "learning_rate": 1.1594905053449724e-05, + "loss": 0.855, + "step": 4677 + }, + { + "epoch": 0.78, + "learning_rate": 1.158954467589379e-05, + "loss": 0.89, + "step": 4678 + }, + { + "epoch": 0.78, + "learning_rate": 1.158418382972347e-05, + "loss": 0.8833, + "step": 4679 + }, + { + "epoch": 0.78, + "learning_rate": 1.15788225165192e-05, + "loss": 0.3668, + "step": 4680 + }, + { + "epoch": 0.78, + "learning_rate": 1.1573460737861555e-05, + "loss": 0.9074, + "step": 4681 + }, + { + "epoch": 0.78, + "learning_rate": 1.1568098495331235e-05, + "loss": 0.846, + "step": 4682 + }, + { + "epoch": 0.79, + "learning_rate": 1.156273579050909e-05, + "loss": 0.3841, + "step": 4683 + }, + { + "epoch": 0.79, + "learning_rate": 1.1557372624976105e-05, + "loss": 0.8897, + "step": 4684 + }, + { + "epoch": 0.79, + "learning_rate": 1.1552009000313387e-05, + "loss": 0.8758, + "step": 4685 + }, + { + "epoch": 0.79, + "learning_rate": 1.1546644918102197e-05, + "loss": 0.886, + "step": 4686 + }, + { + "epoch": 0.79, + "learning_rate": 1.154128037992392e-05, + "loss": 0.8878, + "step": 4687 + }, + { + "epoch": 0.79, + "learning_rate": 1.1535915387360081e-05, + "loss": 0.8915, + "step": 4688 + }, + { + "epoch": 0.79, + "learning_rate": 1.153054994199233e-05, + "loss": 0.8471, + "step": 4689 + }, + { + "epoch": 0.79, + "learning_rate": 1.152518404540246e-05, + "loss": 0.8721, + "step": 4690 + }, + { + 
"epoch": 0.79, + "learning_rate": 1.1519817699172392e-05, + "loss": 0.9347, + "step": 4691 + }, + { + "epoch": 0.79, + "learning_rate": 1.1514450904884182e-05, + "loss": 0.9346, + "step": 4692 + }, + { + "epoch": 0.79, + "learning_rate": 1.1509083664120017e-05, + "loss": 0.8974, + "step": 4693 + }, + { + "epoch": 0.79, + "learning_rate": 1.1503715978462216e-05, + "loss": 0.8577, + "step": 4694 + }, + { + "epoch": 0.79, + "learning_rate": 1.1498347849493227e-05, + "loss": 0.3857, + "step": 4695 + }, + { + "epoch": 0.79, + "learning_rate": 1.1492979278795635e-05, + "loss": 0.3443, + "step": 4696 + }, + { + "epoch": 0.79, + "learning_rate": 1.1487610267952143e-05, + "loss": 0.923, + "step": 4697 + }, + { + "epoch": 0.79, + "learning_rate": 1.1482240818545597e-05, + "loss": 0.8674, + "step": 4698 + }, + { + "epoch": 0.79, + "learning_rate": 1.1476870932158967e-05, + "loss": 0.92, + "step": 4699 + }, + { + "epoch": 0.79, + "learning_rate": 1.1471500610375352e-05, + "loss": 0.8701, + "step": 4700 + }, + { + "epoch": 0.79, + "learning_rate": 1.1466129854777977e-05, + "loss": 0.853, + "step": 4701 + }, + { + "epoch": 0.79, + "learning_rate": 1.1460758666950196e-05, + "loss": 0.9004, + "step": 4702 + }, + { + "epoch": 0.79, + "learning_rate": 1.1455387048475492e-05, + "loss": 0.8967, + "step": 4703 + }, + { + "epoch": 0.79, + "learning_rate": 1.1450015000937476e-05, + "loss": 0.8912, + "step": 4704 + }, + { + "epoch": 0.79, + "learning_rate": 1.1444642525919883e-05, + "loss": 0.9353, + "step": 4705 + }, + { + "epoch": 0.79, + "learning_rate": 1.1439269625006573e-05, + "loss": 0.8819, + "step": 4706 + }, + { + "epoch": 0.79, + "learning_rate": 1.1433896299781538e-05, + "loss": 0.9218, + "step": 4707 + }, + { + "epoch": 0.79, + "learning_rate": 1.1428522551828885e-05, + "loss": 0.8796, + "step": 4708 + }, + { + "epoch": 0.79, + "learning_rate": 1.1423148382732854e-05, + "loss": 0.9304, + "step": 4709 + }, + { + "epoch": 0.79, + "learning_rate": 1.1417773794077801e-05, + "loss": 0.8921, + "step": 4710 + }, + { + "epoch": 0.79, + "learning_rate": 1.1412398787448222e-05, + "loss": 0.8793, + "step": 4711 + }, + { + "epoch": 0.79, + "learning_rate": 1.1407023364428715e-05, + "loss": 0.9542, + "step": 4712 + }, + { + "epoch": 0.79, + "learning_rate": 1.1401647526604018e-05, + "loss": 0.8246, + "step": 4713 + }, + { + "epoch": 0.79, + "learning_rate": 1.1396271275558975e-05, + "loss": 0.854, + "step": 4714 + }, + { + "epoch": 0.79, + "learning_rate": 1.1390894612878571e-05, + "loss": 0.9082, + "step": 4715 + }, + { + "epoch": 0.79, + "learning_rate": 1.1385517540147897e-05, + "loss": 0.9159, + "step": 4716 + }, + { + "epoch": 0.79, + "learning_rate": 1.1380140058952174e-05, + "loss": 0.9452, + "step": 4717 + }, + { + "epoch": 0.79, + "learning_rate": 1.1374762170876731e-05, + "loss": 0.8801, + "step": 4718 + }, + { + "epoch": 0.79, + "learning_rate": 1.1369383877507035e-05, + "loss": 0.8997, + "step": 4719 + }, + { + "epoch": 0.79, + "learning_rate": 1.1364005180428658e-05, + "loss": 0.9369, + "step": 4720 + }, + { + "epoch": 0.79, + "learning_rate": 1.13586260812273e-05, + "loss": 0.8397, + "step": 4721 + }, + { + "epoch": 0.79, + "learning_rate": 1.1353246581488773e-05, + "loss": 0.9441, + "step": 4722 + }, + { + "epoch": 0.79, + "learning_rate": 1.1347866682799007e-05, + "loss": 0.8651, + "step": 4723 + }, + { + "epoch": 0.79, + "learning_rate": 1.1342486386744058e-05, + "loss": 0.833, + "step": 4724 + }, + { + "epoch": 0.79, + "learning_rate": 1.1337105694910093e-05, + "loss": 0.9501, + "step": 4725 + 
}, + { + "epoch": 0.79, + "learning_rate": 1.1331724608883389e-05, + "loss": 0.3814, + "step": 4726 + }, + { + "epoch": 0.79, + "learning_rate": 1.1326343130250357e-05, + "loss": 0.9262, + "step": 4727 + }, + { + "epoch": 0.79, + "learning_rate": 1.1320961260597505e-05, + "loss": 0.8557, + "step": 4728 + }, + { + "epoch": 0.79, + "learning_rate": 1.1315579001511467e-05, + "loss": 0.9295, + "step": 4729 + }, + { + "epoch": 0.79, + "learning_rate": 1.1310196354578993e-05, + "loss": 0.9306, + "step": 4730 + }, + { + "epoch": 0.79, + "learning_rate": 1.1304813321386934e-05, + "loss": 0.8927, + "step": 4731 + }, + { + "epoch": 0.79, + "learning_rate": 1.1299429903522274e-05, + "loss": 0.8875, + "step": 4732 + }, + { + "epoch": 0.79, + "learning_rate": 1.1294046102572097e-05, + "loss": 0.9594, + "step": 4733 + }, + { + "epoch": 0.79, + "learning_rate": 1.1288661920123604e-05, + "loss": 0.907, + "step": 4734 + }, + { + "epoch": 0.79, + "learning_rate": 1.1283277357764104e-05, + "loss": 0.9007, + "step": 4735 + }, + { + "epoch": 0.79, + "learning_rate": 1.1277892417081033e-05, + "loss": 0.9847, + "step": 4736 + }, + { + "epoch": 0.79, + "learning_rate": 1.1272507099661919e-05, + "loss": 0.866, + "step": 4737 + }, + { + "epoch": 0.79, + "learning_rate": 1.1267121407094412e-05, + "loss": 0.8567, + "step": 4738 + }, + { + "epoch": 0.79, + "learning_rate": 1.1261735340966269e-05, + "loss": 0.9112, + "step": 4739 + }, + { + "epoch": 0.79, + "learning_rate": 1.1256348902865363e-05, + "loss": 0.8891, + "step": 4740 + }, + { + "epoch": 0.79, + "learning_rate": 1.125096209437967e-05, + "loss": 0.364, + "step": 4741 + }, + { + "epoch": 0.79, + "learning_rate": 1.1245574917097275e-05, + "loss": 0.9087, + "step": 4742 + }, + { + "epoch": 0.8, + "learning_rate": 1.1240187372606376e-05, + "loss": 0.8967, + "step": 4743 + }, + { + "epoch": 0.8, + "learning_rate": 1.1234799462495278e-05, + "loss": 0.8851, + "step": 4744 + }, + { + "epoch": 0.8, + "learning_rate": 1.1229411188352392e-05, + "loss": 0.8789, + "step": 4745 + }, + { + "epoch": 0.8, + "learning_rate": 1.1224022551766242e-05, + "loss": 0.8822, + "step": 4746 + }, + { + "epoch": 0.8, + "learning_rate": 1.1218633554325449e-05, + "loss": 0.9358, + "step": 4747 + }, + { + "epoch": 0.8, + "learning_rate": 1.1213244197618745e-05, + "loss": 0.8847, + "step": 4748 + }, + { + "epoch": 0.8, + "learning_rate": 1.1207854483234974e-05, + "loss": 0.9066, + "step": 4749 + }, + { + "epoch": 0.8, + "learning_rate": 1.1202464412763075e-05, + "loss": 0.8497, + "step": 4750 + }, + { + "epoch": 0.8, + "learning_rate": 1.1197073987792099e-05, + "loss": 0.9262, + "step": 4751 + }, + { + "epoch": 0.8, + "learning_rate": 1.1191683209911202e-05, + "loss": 0.8571, + "step": 4752 + }, + { + "epoch": 0.8, + "learning_rate": 1.1186292080709639e-05, + "loss": 0.9462, + "step": 4753 + }, + { + "epoch": 0.8, + "learning_rate": 1.1180900601776775e-05, + "loss": 0.8958, + "step": 4754 + }, + { + "epoch": 0.8, + "learning_rate": 1.1175508774702066e-05, + "loss": 0.8244, + "step": 4755 + }, + { + "epoch": 0.8, + "learning_rate": 1.1170116601075085e-05, + "loss": 0.9687, + "step": 4756 + }, + { + "epoch": 0.8, + "learning_rate": 1.1164724082485502e-05, + "loss": 0.925, + "step": 4757 + }, + { + "epoch": 0.8, + "learning_rate": 1.1159331220523088e-05, + "loss": 0.8972, + "step": 4758 + }, + { + "epoch": 0.8, + "learning_rate": 1.115393801677771e-05, + "loss": 0.8719, + "step": 4759 + }, + { + "epoch": 0.8, + "learning_rate": 1.1148544472839345e-05, + "loss": 0.9579, + "step": 4760 + }, + { + 
"epoch": 0.8, + "learning_rate": 1.1143150590298066e-05, + "loss": 0.8357, + "step": 4761 + }, + { + "epoch": 0.8, + "learning_rate": 1.1137756370744046e-05, + "loss": 0.8945, + "step": 4762 + }, + { + "epoch": 0.8, + "learning_rate": 1.1132361815767554e-05, + "loss": 0.8924, + "step": 4763 + }, + { + "epoch": 0.8, + "learning_rate": 1.1126966926958965e-05, + "loss": 0.9052, + "step": 4764 + }, + { + "epoch": 0.8, + "learning_rate": 1.1121571705908748e-05, + "loss": 0.9258, + "step": 4765 + }, + { + "epoch": 0.8, + "learning_rate": 1.1116176154207469e-05, + "loss": 0.9537, + "step": 4766 + }, + { + "epoch": 0.8, + "learning_rate": 1.1110780273445794e-05, + "loss": 0.3696, + "step": 4767 + }, + { + "epoch": 0.8, + "learning_rate": 1.1105384065214485e-05, + "loss": 0.8359, + "step": 4768 + }, + { + "epoch": 0.8, + "learning_rate": 1.1099987531104401e-05, + "loss": 0.9418, + "step": 4769 + }, + { + "epoch": 0.8, + "learning_rate": 1.1094590672706495e-05, + "loss": 0.918, + "step": 4770 + }, + { + "epoch": 0.8, + "learning_rate": 1.1089193491611819e-05, + "loss": 0.8857, + "step": 4771 + }, + { + "epoch": 0.8, + "learning_rate": 1.1083795989411518e-05, + "loss": 0.8528, + "step": 4772 + }, + { + "epoch": 0.8, + "learning_rate": 1.1078398167696831e-05, + "loss": 0.8909, + "step": 4773 + }, + { + "epoch": 0.8, + "learning_rate": 1.1073000028059095e-05, + "loss": 0.8571, + "step": 4774 + }, + { + "epoch": 0.8, + "learning_rate": 1.1067601572089732e-05, + "loss": 0.8955, + "step": 4775 + }, + { + "epoch": 0.8, + "learning_rate": 1.1062202801380273e-05, + "loss": 0.8761, + "step": 4776 + }, + { + "epoch": 0.8, + "learning_rate": 1.1056803717522322e-05, + "loss": 0.9233, + "step": 4777 + }, + { + "epoch": 0.8, + "learning_rate": 1.1051404322107595e-05, + "loss": 0.8808, + "step": 4778 + }, + { + "epoch": 0.8, + "learning_rate": 1.1046004616727877e-05, + "loss": 0.8544, + "step": 4779 + }, + { + "epoch": 0.8, + "learning_rate": 1.104060460297507e-05, + "loss": 0.9034, + "step": 4780 + }, + { + "epoch": 0.8, + "learning_rate": 1.1035204282441152e-05, + "loss": 0.8974, + "step": 4781 + }, + { + "epoch": 0.8, + "learning_rate": 1.1029803656718192e-05, + "loss": 0.9226, + "step": 4782 + }, + { + "epoch": 0.8, + "learning_rate": 1.1024402727398347e-05, + "loss": 0.8654, + "step": 4783 + }, + { + "epoch": 0.8, + "learning_rate": 1.1019001496073873e-05, + "loss": 0.8995, + "step": 4784 + }, + { + "epoch": 0.8, + "learning_rate": 1.1013599964337107e-05, + "loss": 0.8879, + "step": 4785 + }, + { + "epoch": 0.8, + "learning_rate": 1.1008198133780479e-05, + "loss": 0.8276, + "step": 4786 + }, + { + "epoch": 0.8, + "learning_rate": 1.1002796005996503e-05, + "loss": 0.9203, + "step": 4787 + }, + { + "epoch": 0.8, + "learning_rate": 1.0997393582577784e-05, + "loss": 0.8919, + "step": 4788 + }, + { + "epoch": 0.8, + "learning_rate": 1.0991990865117011e-05, + "loss": 0.8496, + "step": 4789 + }, + { + "epoch": 0.8, + "learning_rate": 1.0986587855206968e-05, + "loss": 0.934, + "step": 4790 + }, + { + "epoch": 0.8, + "learning_rate": 1.0981184554440507e-05, + "loss": 0.8833, + "step": 4791 + }, + { + "epoch": 0.8, + "learning_rate": 1.0975780964410587e-05, + "loss": 0.9089, + "step": 4792 + }, + { + "epoch": 0.8, + "learning_rate": 1.0970377086710244e-05, + "loss": 0.9742, + "step": 4793 + }, + { + "epoch": 0.8, + "learning_rate": 1.0964972922932589e-05, + "loss": 0.9108, + "step": 4794 + }, + { + "epoch": 0.8, + "learning_rate": 1.0959568474670831e-05, + "loss": 0.9019, + "step": 4795 + }, + { + "epoch": 0.8, + 
"learning_rate": 1.095416374351826e-05, + "loss": 0.9499, + "step": 4796 + }, + { + "epoch": 0.8, + "learning_rate": 1.0948758731068246e-05, + "loss": 0.8763, + "step": 4797 + }, + { + "epoch": 0.8, + "learning_rate": 1.0943353438914237e-05, + "loss": 0.9199, + "step": 4798 + }, + { + "epoch": 0.8, + "learning_rate": 1.0937947868649781e-05, + "loss": 0.9219, + "step": 4799 + }, + { + "epoch": 0.8, + "learning_rate": 1.0932542021868482e-05, + "loss": 0.8384, + "step": 4800 + }, + { + "epoch": 0.8, + "learning_rate": 1.0927135900164056e-05, + "loss": 0.8575, + "step": 4801 + }, + { + "epoch": 0.81, + "learning_rate": 1.0921729505130272e-05, + "loss": 0.9245, + "step": 4802 + }, + { + "epoch": 0.81, + "learning_rate": 1.0916322838361e-05, + "loss": 0.9773, + "step": 4803 + }, + { + "epoch": 0.81, + "learning_rate": 1.0910915901450173e-05, + "loss": 0.8873, + "step": 4804 + }, + { + "epoch": 0.81, + "learning_rate": 1.0905508695991822e-05, + "loss": 0.8876, + "step": 4805 + }, + { + "epoch": 0.81, + "learning_rate": 1.0900101223580042e-05, + "loss": 0.8987, + "step": 4806 + }, + { + "epoch": 0.81, + "learning_rate": 1.0894693485809016e-05, + "loss": 0.9601, + "step": 4807 + }, + { + "epoch": 0.81, + "learning_rate": 1.0889285484272993e-05, + "loss": 0.9447, + "step": 4808 + }, + { + "epoch": 0.81, + "learning_rate": 1.0883877220566322e-05, + "loss": 0.8519, + "step": 4809 + }, + { + "epoch": 0.81, + "learning_rate": 1.0878468696283404e-05, + "loss": 1.0192, + "step": 4810 + }, + { + "epoch": 0.81, + "learning_rate": 1.0873059913018735e-05, + "loss": 0.3849, + "step": 4811 + }, + { + "epoch": 0.81, + "learning_rate": 1.0867650872366877e-05, + "loss": 0.9456, + "step": 4812 + }, + { + "epoch": 0.81, + "learning_rate": 1.0862241575922474e-05, + "loss": 0.921, + "step": 4813 + }, + { + "epoch": 0.81, + "learning_rate": 1.0856832025280244e-05, + "loss": 0.8766, + "step": 4814 + }, + { + "epoch": 0.81, + "learning_rate": 1.0851422222034977e-05, + "loss": 0.886, + "step": 4815 + }, + { + "epoch": 0.81, + "learning_rate": 1.0846012167781538e-05, + "loss": 0.8473, + "step": 4816 + }, + { + "epoch": 0.81, + "learning_rate": 1.0840601864114873e-05, + "loss": 0.8243, + "step": 4817 + }, + { + "epoch": 0.81, + "learning_rate": 1.0835191312629994e-05, + "loss": 0.8361, + "step": 4818 + }, + { + "epoch": 0.81, + "learning_rate": 1.0829780514921982e-05, + "loss": 0.9015, + "step": 4819 + }, + { + "epoch": 0.81, + "learning_rate": 1.0824369472586005e-05, + "loss": 0.8962, + "step": 4820 + }, + { + "epoch": 0.81, + "learning_rate": 1.0818958187217288e-05, + "loss": 0.8282, + "step": 4821 + }, + { + "epoch": 0.81, + "learning_rate": 1.0813546660411143e-05, + "loss": 0.8883, + "step": 4822 + }, + { + "epoch": 0.81, + "learning_rate": 1.0808134893762935e-05, + "loss": 0.9309, + "step": 4823 + }, + { + "epoch": 0.81, + "learning_rate": 1.0802722888868116e-05, + "loss": 0.9382, + "step": 4824 + }, + { + "epoch": 0.81, + "learning_rate": 1.0797310647322199e-05, + "loss": 0.954, + "step": 4825 + }, + { + "epoch": 0.81, + "learning_rate": 1.0791898170720772e-05, + "loss": 0.9377, + "step": 4826 + }, + { + "epoch": 0.81, + "learning_rate": 1.0786485460659486e-05, + "loss": 0.9616, + "step": 4827 + }, + { + "epoch": 0.81, + "learning_rate": 1.0781072518734065e-05, + "loss": 0.9151, + "step": 4828 + }, + { + "epoch": 0.81, + "learning_rate": 1.0775659346540303e-05, + "loss": 0.9416, + "step": 4829 + }, + { + "epoch": 0.81, + "learning_rate": 1.0770245945674061e-05, + "loss": 0.8666, + "step": 4830 + }, + { + "epoch": 
0.81, + "learning_rate": 1.0764832317731262e-05, + "loss": 0.8911, + "step": 4831 + }, + { + "epoch": 0.81, + "learning_rate": 1.07594184643079e-05, + "loss": 0.8723, + "step": 4832 + }, + { + "epoch": 0.81, + "learning_rate": 1.075400438700004e-05, + "loss": 1.0159, + "step": 4833 + }, + { + "epoch": 0.81, + "learning_rate": 1.0748590087403805e-05, + "loss": 0.8967, + "step": 4834 + }, + { + "epoch": 0.81, + "learning_rate": 1.074317556711539e-05, + "loss": 0.8606, + "step": 4835 + }, + { + "epoch": 0.81, + "learning_rate": 1.0737760827731048e-05, + "loss": 0.8944, + "step": 4836 + }, + { + "epoch": 0.81, + "learning_rate": 1.0732345870847104e-05, + "loss": 0.8757, + "step": 4837 + }, + { + "epoch": 0.81, + "learning_rate": 1.0726930698059944e-05, + "loss": 0.9204, + "step": 4838 + }, + { + "epoch": 0.81, + "learning_rate": 1.0721515310966017e-05, + "loss": 0.9023, + "step": 4839 + }, + { + "epoch": 0.81, + "learning_rate": 1.0716099711161833e-05, + "loss": 0.8812, + "step": 4840 + }, + { + "epoch": 0.81, + "learning_rate": 1.0710683900243973e-05, + "loss": 0.815, + "step": 4841 + }, + { + "epoch": 0.81, + "learning_rate": 1.0705267879809069e-05, + "loss": 0.8954, + "step": 4842 + }, + { + "epoch": 0.81, + "learning_rate": 1.0699851651453826e-05, + "loss": 0.8781, + "step": 4843 + }, + { + "epoch": 0.81, + "learning_rate": 1.0694435216774997e-05, + "loss": 0.9039, + "step": 4844 + }, + { + "epoch": 0.81, + "learning_rate": 1.068901857736941e-05, + "loss": 0.8911, + "step": 4845 + }, + { + "epoch": 0.81, + "learning_rate": 1.0683601734833943e-05, + "loss": 0.8887, + "step": 4846 + }, + { + "epoch": 0.81, + "learning_rate": 1.0678184690765545e-05, + "loss": 0.864, + "step": 4847 + }, + { + "epoch": 0.81, + "learning_rate": 1.0672767446761208e-05, + "loss": 0.8787, + "step": 4848 + }, + { + "epoch": 0.81, + "learning_rate": 1.0667350004417996e-05, + "loss": 0.932, + "step": 4849 + }, + { + "epoch": 0.81, + "learning_rate": 1.0661932365333028e-05, + "loss": 0.4002, + "step": 4850 + }, + { + "epoch": 0.81, + "learning_rate": 1.0656514531103484e-05, + "loss": 0.8761, + "step": 4851 + }, + { + "epoch": 0.81, + "learning_rate": 1.065109650332659e-05, + "loss": 0.9551, + "step": 4852 + }, + { + "epoch": 0.81, + "learning_rate": 1.0645678283599643e-05, + "loss": 0.9198, + "step": 4853 + }, + { + "epoch": 0.81, + "learning_rate": 1.064025987351999e-05, + "loss": 0.9084, + "step": 4854 + }, + { + "epoch": 0.81, + "learning_rate": 1.0634841274685035e-05, + "loss": 0.855, + "step": 4855 + }, + { + "epoch": 0.81, + "learning_rate": 1.0629422488692236e-05, + "loss": 0.9365, + "step": 4856 + }, + { + "epoch": 0.81, + "learning_rate": 1.062400351713911e-05, + "loss": 0.8903, + "step": 4857 + }, + { + "epoch": 0.81, + "learning_rate": 1.0618584361623224e-05, + "loss": 0.3702, + "step": 4858 + }, + { + "epoch": 0.81, + "learning_rate": 1.0613165023742206e-05, + "loss": 0.8308, + "step": 4859 + }, + { + "epoch": 0.81, + "learning_rate": 1.0607745505093727e-05, + "loss": 0.9538, + "step": 4860 + }, + { + "epoch": 0.81, + "learning_rate": 1.0602325807275521e-05, + "loss": 0.899, + "step": 4861 + }, + { + "epoch": 0.82, + "learning_rate": 1.0596905931885374e-05, + "loss": 0.8709, + "step": 4862 + }, + { + "epoch": 0.82, + "learning_rate": 1.0591485880521119e-05, + "loss": 0.8553, + "step": 4863 + }, + { + "epoch": 0.82, + "learning_rate": 1.0586065654780645e-05, + "loss": 0.886, + "step": 4864 + }, + { + "epoch": 0.82, + "learning_rate": 1.058064525626189e-05, + "loss": 0.9474, + "step": 4865 + }, + { + "epoch": 
0.82, + "learning_rate": 1.0575224686562846e-05, + "loss": 0.9085, + "step": 4866 + }, + { + "epoch": 0.82, + "learning_rate": 1.056980394728155e-05, + "loss": 0.3902, + "step": 4867 + }, + { + "epoch": 0.82, + "learning_rate": 1.0564383040016098e-05, + "loss": 0.8572, + "step": 4868 + }, + { + "epoch": 0.82, + "learning_rate": 1.0558961966364623e-05, + "loss": 0.9267, + "step": 4869 + }, + { + "epoch": 0.82, + "learning_rate": 1.055354072792532e-05, + "loss": 0.8654, + "step": 4870 + }, + { + "epoch": 0.82, + "learning_rate": 1.0548119326296424e-05, + "loss": 0.8927, + "step": 4871 + }, + { + "epoch": 0.82, + "learning_rate": 1.0542697763076226e-05, + "loss": 0.8819, + "step": 4872 + }, + { + "epoch": 0.82, + "learning_rate": 1.0537276039863049e-05, + "loss": 0.8866, + "step": 4873 + }, + { + "epoch": 0.82, + "learning_rate": 1.0531854158255286e-05, + "loss": 0.8653, + "step": 4874 + }, + { + "epoch": 0.82, + "learning_rate": 1.0526432119851356e-05, + "loss": 0.9257, + "step": 4875 + }, + { + "epoch": 0.82, + "learning_rate": 1.0521009926249736e-05, + "loss": 0.8934, + "step": 4876 + }, + { + "epoch": 0.82, + "learning_rate": 1.051558757904894e-05, + "loss": 0.9787, + "step": 4877 + }, + { + "epoch": 0.82, + "learning_rate": 1.0510165079847544e-05, + "loss": 0.9064, + "step": 4878 + }, + { + "epoch": 0.82, + "learning_rate": 1.0504742430244144e-05, + "loss": 0.8953, + "step": 4879 + }, + { + "epoch": 0.82, + "learning_rate": 1.0499319631837403e-05, + "loss": 0.3844, + "step": 4880 + }, + { + "epoch": 0.82, + "learning_rate": 1.0493896686226018e-05, + "loss": 0.8435, + "step": 4881 + }, + { + "epoch": 0.82, + "learning_rate": 1.0488473595008723e-05, + "loss": 0.8787, + "step": 4882 + }, + { + "epoch": 0.82, + "learning_rate": 1.048305035978431e-05, + "loss": 0.8961, + "step": 4883 + }, + { + "epoch": 0.82, + "learning_rate": 1.0477626982151603e-05, + "loss": 0.9483, + "step": 4884 + }, + { + "epoch": 0.82, + "learning_rate": 1.0472203463709467e-05, + "loss": 0.9318, + "step": 4885 + }, + { + "epoch": 0.82, + "learning_rate": 1.0466779806056816e-05, + "loss": 0.9505, + "step": 4886 + }, + { + "epoch": 0.82, + "learning_rate": 1.04613560107926e-05, + "loss": 0.9312, + "step": 4887 + }, + { + "epoch": 0.82, + "learning_rate": 1.045593207951581e-05, + "loss": 0.8462, + "step": 4888 + }, + { + "epoch": 0.82, + "learning_rate": 1.0450508013825476e-05, + "loss": 0.8609, + "step": 4889 + }, + { + "epoch": 0.82, + "learning_rate": 1.0445083815320672e-05, + "loss": 0.8515, + "step": 4890 + }, + { + "epoch": 0.82, + "learning_rate": 1.0439659485600508e-05, + "loss": 0.8857, + "step": 4891 + }, + { + "epoch": 0.82, + "learning_rate": 1.0434235026264132e-05, + "loss": 0.8999, + "step": 4892 + }, + { + "epoch": 0.82, + "learning_rate": 1.0428810438910733e-05, + "loss": 0.8347, + "step": 4893 + }, + { + "epoch": 0.82, + "learning_rate": 1.0423385725139532e-05, + "loss": 0.8204, + "step": 4894 + }, + { + "epoch": 0.82, + "learning_rate": 1.0417960886549799e-05, + "loss": 0.8564, + "step": 4895 + }, + { + "epoch": 0.82, + "learning_rate": 1.0412535924740824e-05, + "loss": 0.8538, + "step": 4896 + }, + { + "epoch": 0.82, + "learning_rate": 1.0407110841311948e-05, + "loss": 0.9223, + "step": 4897 + }, + { + "epoch": 0.82, + "learning_rate": 1.040168563786254e-05, + "loss": 0.8521, + "step": 4898 + }, + { + "epoch": 0.82, + "learning_rate": 1.0396260315992009e-05, + "loss": 0.8961, + "step": 4899 + }, + { + "epoch": 0.82, + "learning_rate": 1.0390834877299793e-05, + "loss": 0.8504, + "step": 4900 + }, + { + 
"epoch": 0.82, + "learning_rate": 1.0385409323385373e-05, + "loss": 0.9209, + "step": 4901 + }, + { + "epoch": 0.82, + "learning_rate": 1.0379983655848254e-05, + "loss": 0.3875, + "step": 4902 + }, + { + "epoch": 0.82, + "learning_rate": 1.0374557876287984e-05, + "loss": 0.8582, + "step": 4903 + }, + { + "epoch": 0.82, + "learning_rate": 1.0369131986304138e-05, + "loss": 0.9099, + "step": 4904 + }, + { + "epoch": 0.82, + "learning_rate": 1.0363705987496322e-05, + "loss": 0.8911, + "step": 4905 + }, + { + "epoch": 0.82, + "learning_rate": 1.0358279881464182e-05, + "loss": 0.8974, + "step": 4906 + }, + { + "epoch": 0.82, + "learning_rate": 1.0352853669807385e-05, + "loss": 0.8323, + "step": 4907 + }, + { + "epoch": 0.82, + "learning_rate": 1.0347427354125643e-05, + "loss": 0.8752, + "step": 4908 + }, + { + "epoch": 0.82, + "learning_rate": 1.0342000936018682e-05, + "loss": 0.9477, + "step": 4909 + }, + { + "epoch": 0.82, + "learning_rate": 1.0336574417086271e-05, + "loss": 0.8883, + "step": 4910 + }, + { + "epoch": 0.82, + "learning_rate": 1.0331147798928205e-05, + "loss": 0.8922, + "step": 4911 + }, + { + "epoch": 0.82, + "learning_rate": 1.0325721083144306e-05, + "loss": 0.8135, + "step": 4912 + }, + { + "epoch": 0.82, + "learning_rate": 1.0320294271334428e-05, + "loss": 0.8759, + "step": 4913 + }, + { + "epoch": 0.82, + "learning_rate": 1.0314867365098451e-05, + "loss": 0.9168, + "step": 4914 + }, + { + "epoch": 0.82, + "learning_rate": 1.0309440366036284e-05, + "loss": 0.8765, + "step": 4915 + }, + { + "epoch": 0.82, + "learning_rate": 1.0304013275747863e-05, + "loss": 0.9429, + "step": 4916 + }, + { + "epoch": 0.82, + "learning_rate": 1.0298586095833152e-05, + "loss": 0.9267, + "step": 4917 + }, + { + "epoch": 0.82, + "learning_rate": 1.0293158827892136e-05, + "loss": 0.8168, + "step": 4918 + }, + { + "epoch": 0.82, + "learning_rate": 1.0287731473524836e-05, + "loss": 0.9372, + "step": 4919 + }, + { + "epoch": 0.82, + "learning_rate": 1.028230403433129e-05, + "loss": 0.8806, + "step": 4920 + }, + { + "epoch": 0.82, + "learning_rate": 1.0276876511911564e-05, + "loss": 0.8612, + "step": 4921 + }, + { + "epoch": 0.83, + "learning_rate": 1.0271448907865748e-05, + "loss": 0.8708, + "step": 4922 + }, + { + "epoch": 0.83, + "learning_rate": 1.0266021223793955e-05, + "loss": 0.367, + "step": 4923 + }, + { + "epoch": 0.83, + "learning_rate": 1.026059346129633e-05, + "loss": 0.9494, + "step": 4924 + }, + { + "epoch": 0.83, + "learning_rate": 1.0255165621973026e-05, + "loss": 0.8252, + "step": 4925 + }, + { + "epoch": 0.83, + "learning_rate": 1.024973770742423e-05, + "loss": 0.8899, + "step": 4926 + }, + { + "epoch": 0.83, + "learning_rate": 1.0244309719250149e-05, + "loss": 0.8893, + "step": 4927 + }, + { + "epoch": 0.83, + "learning_rate": 1.0238881659051007e-05, + "loss": 0.9239, + "step": 4928 + }, + { + "epoch": 0.83, + "learning_rate": 1.0233453528427055e-05, + "loss": 0.3885, + "step": 4929 + }, + { + "epoch": 0.83, + "learning_rate": 1.0228025328978563e-05, + "loss": 0.8838, + "step": 4930 + }, + { + "epoch": 0.83, + "learning_rate": 1.0222597062305824e-05, + "loss": 0.8591, + "step": 4931 + }, + { + "epoch": 0.83, + "learning_rate": 1.0217168730009142e-05, + "loss": 0.8789, + "step": 4932 + }, + { + "epoch": 0.83, + "learning_rate": 1.0211740333688852e-05, + "loss": 0.8623, + "step": 4933 + }, + { + "epoch": 0.83, + "learning_rate": 1.0206311874945291e-05, + "loss": 0.9548, + "step": 4934 + }, + { + "epoch": 0.83, + "learning_rate": 1.0200883355378839e-05, + "loss": 0.9017, + "step": 4935 
+ }, + { + "epoch": 0.83, + "learning_rate": 1.0195454776589872e-05, + "loss": 0.8881, + "step": 4936 + }, + { + "epoch": 0.83, + "learning_rate": 1.0190026140178796e-05, + "loss": 0.8661, + "step": 4937 + }, + { + "epoch": 0.83, + "learning_rate": 1.0184597447746025e-05, + "loss": 0.9158, + "step": 4938 + }, + { + "epoch": 0.83, + "learning_rate": 1.0179168700892001e-05, + "loss": 0.9384, + "step": 4939 + }, + { + "epoch": 0.83, + "learning_rate": 1.0173739901217168e-05, + "loss": 0.8696, + "step": 4940 + }, + { + "epoch": 0.83, + "learning_rate": 1.0168311050322e-05, + "loss": 0.3787, + "step": 4941 + }, + { + "epoch": 0.83, + "learning_rate": 1.0162882149806967e-05, + "loss": 0.9069, + "step": 4942 + }, + { + "epoch": 0.83, + "learning_rate": 1.0157453201272583e-05, + "loss": 0.9417, + "step": 4943 + }, + { + "epoch": 0.83, + "learning_rate": 1.0152024206319344e-05, + "loss": 0.8846, + "step": 4944 + }, + { + "epoch": 0.83, + "learning_rate": 1.0146595166547784e-05, + "loss": 0.9362, + "step": 4945 + }, + { + "epoch": 0.83, + "learning_rate": 1.0141166083558436e-05, + "loss": 0.9034, + "step": 4946 + }, + { + "epoch": 0.83, + "learning_rate": 1.0135736958951848e-05, + "loss": 0.9494, + "step": 4947 + }, + { + "epoch": 0.83, + "learning_rate": 1.0130307794328586e-05, + "loss": 0.9104, + "step": 4948 + }, + { + "epoch": 0.83, + "learning_rate": 1.0124878591289228e-05, + "loss": 0.8694, + "step": 4949 + }, + { + "epoch": 0.83, + "learning_rate": 1.0119449351434353e-05, + "loss": 0.9209, + "step": 4950 + }, + { + "epoch": 0.83, + "learning_rate": 1.0114020076364563e-05, + "loss": 0.8016, + "step": 4951 + }, + { + "epoch": 0.83, + "learning_rate": 1.0108590767680464e-05, + "loss": 0.9233, + "step": 4952 + }, + { + "epoch": 0.83, + "learning_rate": 1.0103161426982668e-05, + "loss": 0.9105, + "step": 4953 + }, + { + "epoch": 0.83, + "learning_rate": 1.0097732055871807e-05, + "loss": 0.9471, + "step": 4954 + }, + { + "epoch": 0.83, + "learning_rate": 1.0092302655948513e-05, + "loss": 0.8771, + "step": 4955 + }, + { + "epoch": 0.83, + "learning_rate": 1.0086873228813434e-05, + "loss": 0.8689, + "step": 4956 + }, + { + "epoch": 0.83, + "learning_rate": 1.0081443776067216e-05, + "loss": 0.9537, + "step": 4957 + }, + { + "epoch": 0.83, + "learning_rate": 1.0076014299310523e-05, + "loss": 0.9365, + "step": 4958 + }, + { + "epoch": 0.83, + "learning_rate": 1.0070584800144022e-05, + "loss": 0.8282, + "step": 4959 + }, + { + "epoch": 0.83, + "learning_rate": 1.0065155280168382e-05, + "loss": 0.4214, + "step": 4960 + }, + { + "epoch": 0.83, + "learning_rate": 1.0059725740984285e-05, + "loss": 0.9032, + "step": 4961 + }, + { + "epoch": 0.83, + "learning_rate": 1.0054296184192414e-05, + "loss": 0.8511, + "step": 4962 + }, + { + "epoch": 0.83, + "learning_rate": 1.0048866611393462e-05, + "loss": 0.9012, + "step": 4963 + }, + { + "epoch": 0.83, + "learning_rate": 1.0043437024188123e-05, + "loss": 0.9001, + "step": 4964 + }, + { + "epoch": 0.83, + "learning_rate": 1.0038007424177092e-05, + "loss": 0.933, + "step": 4965 + }, + { + "epoch": 0.83, + "learning_rate": 1.0032577812961077e-05, + "loss": 0.912, + "step": 4966 + }, + { + "epoch": 0.83, + "learning_rate": 1.0027148192140783e-05, + "loss": 0.9428, + "step": 4967 + }, + { + "epoch": 0.83, + "learning_rate": 1.0021718563316917e-05, + "loss": 0.9019, + "step": 4968 + }, + { + "epoch": 0.83, + "learning_rate": 1.0016288928090194e-05, + "loss": 0.892, + "step": 4969 + }, + { + "epoch": 0.83, + "learning_rate": 1.0010859288061322e-05, + "loss": 0.8984, + 
"step": 4970 + }, + { + "epoch": 0.83, + "learning_rate": 1.0005429644831021e-05, + "loss": 0.9004, + "step": 4971 + }, + { + "epoch": 0.83, + "learning_rate": 1e-05, + "loss": 0.9076, + "step": 4972 + }, + { + "epoch": 0.83, + "learning_rate": 9.994570355168982e-06, + "loss": 0.8628, + "step": 4973 + }, + { + "epoch": 0.83, + "learning_rate": 9.98914071193868e-06, + "loss": 0.8597, + "step": 4974 + }, + { + "epoch": 0.83, + "learning_rate": 9.98371107190981e-06, + "loss": 0.9043, + "step": 4975 + }, + { + "epoch": 0.83, + "learning_rate": 9.978281436683086e-06, + "loss": 0.8737, + "step": 4976 + }, + { + "epoch": 0.83, + "learning_rate": 9.972851807859218e-06, + "loss": 0.922, + "step": 4977 + }, + { + "epoch": 0.83, + "learning_rate": 9.967422187038923e-06, + "loss": 0.8744, + "step": 4978 + }, + { + "epoch": 0.83, + "learning_rate": 9.96199257582291e-06, + "loss": 0.8336, + "step": 4979 + }, + { + "epoch": 0.83, + "learning_rate": 9.95656297581188e-06, + "loss": 0.8826, + "step": 4980 + }, + { + "epoch": 0.84, + "learning_rate": 9.951133388606542e-06, + "loss": 0.8656, + "step": 4981 + }, + { + "epoch": 0.84, + "learning_rate": 9.94570381580759e-06, + "loss": 0.8141, + "step": 4982 + }, + { + "epoch": 0.84, + "learning_rate": 9.94027425901572e-06, + "loss": 0.9004, + "step": 4983 + }, + { + "epoch": 0.84, + "learning_rate": 9.934844719831623e-06, + "loss": 0.877, + "step": 4984 + }, + { + "epoch": 0.84, + "learning_rate": 9.92941519985598e-06, + "loss": 0.8903, + "step": 4985 + }, + { + "epoch": 0.84, + "learning_rate": 9.923985700689477e-06, + "loss": 0.8798, + "step": 4986 + }, + { + "epoch": 0.84, + "learning_rate": 9.918556223932785e-06, + "loss": 0.8917, + "step": 4987 + }, + { + "epoch": 0.84, + "learning_rate": 9.913126771186569e-06, + "loss": 0.8633, + "step": 4988 + }, + { + "epoch": 0.84, + "learning_rate": 9.90769734405149e-06, + "loss": 0.8787, + "step": 4989 + }, + { + "epoch": 0.84, + "learning_rate": 9.902267944128197e-06, + "loss": 0.8652, + "step": 4990 + }, + { + "epoch": 0.84, + "learning_rate": 9.896838573017337e-06, + "loss": 0.9192, + "step": 4991 + }, + { + "epoch": 0.84, + "learning_rate": 9.891409232319543e-06, + "loss": 0.9101, + "step": 4992 + }, + { + "epoch": 0.84, + "learning_rate": 9.885979923635438e-06, + "loss": 0.9122, + "step": 4993 + }, + { + "epoch": 0.84, + "learning_rate": 9.880550648565648e-06, + "loss": 0.8509, + "step": 4994 + }, + { + "epoch": 0.84, + "learning_rate": 9.875121408710774e-06, + "loss": 0.9345, + "step": 4995 + }, + { + "epoch": 0.84, + "learning_rate": 9.869692205671415e-06, + "loss": 0.8611, + "step": 4996 + }, + { + "epoch": 0.84, + "learning_rate": 9.864263041048154e-06, + "loss": 0.8968, + "step": 4997 + }, + { + "epoch": 0.84, + "learning_rate": 9.858833916441568e-06, + "loss": 0.9318, + "step": 4998 + }, + { + "epoch": 0.84, + "learning_rate": 9.85340483345222e-06, + "loss": 0.8913, + "step": 4999 + }, + { + "epoch": 0.84, + "learning_rate": 9.847975793680657e-06, + "loss": 0.8637, + "step": 5000 + }, + { + "epoch": 0.84, + "learning_rate": 9.842546798727417e-06, + "loss": 0.9016, + "step": 5001 + }, + { + "epoch": 0.84, + "learning_rate": 9.837117850193032e-06, + "loss": 0.9472, + "step": 5002 + }, + { + "epoch": 0.84, + "learning_rate": 9.831688949678004e-06, + "loss": 0.9084, + "step": 5003 + }, + { + "epoch": 0.84, + "learning_rate": 9.826260098782836e-06, + "loss": 0.8702, + "step": 5004 + }, + { + "epoch": 0.84, + "learning_rate": 9.820831299108002e-06, + "loss": 0.9162, + "step": 5005 + }, + { + "epoch": 0.84, + 
"learning_rate": 9.815402552253977e-06, + "loss": 0.9123, + "step": 5006 + }, + { + "epoch": 0.84, + "learning_rate": 9.809973859821207e-06, + "loss": 0.8731, + "step": 5007 + }, + { + "epoch": 0.84, + "learning_rate": 9.804545223410133e-06, + "loss": 0.9107, + "step": 5008 + }, + { + "epoch": 0.84, + "learning_rate": 9.799116644621163e-06, + "loss": 0.8829, + "step": 5009 + }, + { + "epoch": 0.84, + "learning_rate": 9.793688125054709e-06, + "loss": 0.9118, + "step": 5010 + }, + { + "epoch": 0.84, + "learning_rate": 9.788259666311152e-06, + "loss": 0.9044, + "step": 5011 + }, + { + "epoch": 0.84, + "learning_rate": 9.78283126999086e-06, + "loss": 0.9074, + "step": 5012 + }, + { + "epoch": 0.84, + "learning_rate": 9.777402937694178e-06, + "loss": 0.8431, + "step": 5013 + }, + { + "epoch": 0.84, + "learning_rate": 9.771974671021438e-06, + "loss": 0.8651, + "step": 5014 + }, + { + "epoch": 0.84, + "learning_rate": 9.766546471572947e-06, + "loss": 0.9109, + "step": 5015 + }, + { + "epoch": 0.84, + "learning_rate": 9.761118340949e-06, + "loss": 0.8936, + "step": 5016 + }, + { + "epoch": 0.84, + "learning_rate": 9.755690280749854e-06, + "loss": 0.9143, + "step": 5017 + }, + { + "epoch": 0.84, + "learning_rate": 9.750262292575771e-06, + "loss": 0.9021, + "step": 5018 + }, + { + "epoch": 0.84, + "learning_rate": 9.744834378026977e-06, + "loss": 0.8852, + "step": 5019 + }, + { + "epoch": 0.84, + "learning_rate": 9.739406538703671e-06, + "loss": 0.9263, + "step": 5020 + }, + { + "epoch": 0.84, + "learning_rate": 9.733978776206046e-06, + "loss": 0.8914, + "step": 5021 + }, + { + "epoch": 0.84, + "learning_rate": 9.728551092134255e-06, + "loss": 0.8884, + "step": 5022 + }, + { + "epoch": 0.84, + "learning_rate": 9.723123488088441e-06, + "loss": 0.8532, + "step": 5023 + }, + { + "epoch": 0.84, + "learning_rate": 9.717695965668714e-06, + "loss": 0.8916, + "step": 5024 + }, + { + "epoch": 0.84, + "learning_rate": 9.712268526475164e-06, + "loss": 0.8899, + "step": 5025 + }, + { + "epoch": 0.84, + "learning_rate": 9.706841172107864e-06, + "loss": 0.9273, + "step": 5026 + }, + { + "epoch": 0.84, + "learning_rate": 9.701413904166852e-06, + "loss": 0.3896, + "step": 5027 + }, + { + "epoch": 0.84, + "learning_rate": 9.695986724252139e-06, + "loss": 0.8244, + "step": 5028 + }, + { + "epoch": 0.84, + "learning_rate": 9.69055963396372e-06, + "loss": 0.8836, + "step": 5029 + }, + { + "epoch": 0.84, + "learning_rate": 9.685132634901552e-06, + "loss": 0.8949, + "step": 5030 + }, + { + "epoch": 0.84, + "learning_rate": 9.679705728665577e-06, + "loss": 0.9178, + "step": 5031 + }, + { + "epoch": 0.84, + "learning_rate": 9.674278916855697e-06, + "loss": 0.8692, + "step": 5032 + }, + { + "epoch": 0.84, + "learning_rate": 9.668852201071797e-06, + "loss": 0.8455, + "step": 5033 + }, + { + "epoch": 0.84, + "learning_rate": 9.663425582913732e-06, + "loss": 0.8898, + "step": 5034 + }, + { + "epoch": 0.84, + "learning_rate": 9.65799906398132e-06, + "loss": 0.8989, + "step": 5035 + }, + { + "epoch": 0.84, + "learning_rate": 9.65257264587436e-06, + "loss": 0.8344, + "step": 5036 + }, + { + "epoch": 0.84, + "learning_rate": 9.647146330192617e-06, + "loss": 0.9024, + "step": 5037 + }, + { + "epoch": 0.84, + "learning_rate": 9.641720118535821e-06, + "loss": 0.8706, + "step": 5038 + }, + { + "epoch": 0.84, + "learning_rate": 9.636294012503682e-06, + "loss": 0.9068, + "step": 5039 + }, + { + "epoch": 0.84, + "learning_rate": 9.630868013695866e-06, + "loss": 0.9211, + "step": 5040 + }, + { + "epoch": 0.85, + "learning_rate": 
9.625442123712019e-06, + "loss": 0.9585, + "step": 5041 + }, + { + "epoch": 0.85, + "learning_rate": 9.620016344151746e-06, + "loss": 0.908, + "step": 5042 + }, + { + "epoch": 0.85, + "learning_rate": 9.614590676614628e-06, + "loss": 0.8565, + "step": 5043 + }, + { + "epoch": 0.85, + "learning_rate": 9.609165122700209e-06, + "loss": 0.9496, + "step": 5044 + }, + { + "epoch": 0.85, + "learning_rate": 9.603739684007995e-06, + "loss": 0.8356, + "step": 5045 + }, + { + "epoch": 0.85, + "learning_rate": 9.598314362137464e-06, + "loss": 0.9001, + "step": 5046 + }, + { + "epoch": 0.85, + "learning_rate": 9.592889158688058e-06, + "loss": 0.906, + "step": 5047 + }, + { + "epoch": 0.85, + "learning_rate": 9.587464075259181e-06, + "loss": 0.4076, + "step": 5048 + }, + { + "epoch": 0.85, + "learning_rate": 9.582039113450208e-06, + "loss": 0.8419, + "step": 5049 + }, + { + "epoch": 0.85, + "learning_rate": 9.57661427486047e-06, + "loss": 0.8924, + "step": 5050 + }, + { + "epoch": 0.85, + "learning_rate": 9.571189561089268e-06, + "loss": 0.9243, + "step": 5051 + }, + { + "epoch": 0.85, + "learning_rate": 9.56576497373587e-06, + "loss": 0.3396, + "step": 5052 + }, + { + "epoch": 0.85, + "learning_rate": 9.560340514399494e-06, + "loss": 0.8672, + "step": 5053 + }, + { + "epoch": 0.85, + "learning_rate": 9.554916184679331e-06, + "loss": 0.8739, + "step": 5054 + }, + { + "epoch": 0.85, + "learning_rate": 9.549491986174525e-06, + "loss": 0.8874, + "step": 5055 + }, + { + "epoch": 0.85, + "learning_rate": 9.544067920484196e-06, + "loss": 0.8594, + "step": 5056 + }, + { + "epoch": 0.85, + "learning_rate": 9.538643989207405e-06, + "loss": 0.9046, + "step": 5057 + }, + { + "epoch": 0.85, + "learning_rate": 9.533220193943186e-06, + "loss": 0.9047, + "step": 5058 + }, + { + "epoch": 0.85, + "learning_rate": 9.527796536290536e-06, + "loss": 0.3621, + "step": 5059 + }, + { + "epoch": 0.85, + "learning_rate": 9.5223730178484e-06, + "loss": 0.368, + "step": 5060 + }, + { + "epoch": 0.85, + "learning_rate": 9.516949640215694e-06, + "loss": 0.9186, + "step": 5061 + }, + { + "epoch": 0.85, + "learning_rate": 9.511526404991278e-06, + "loss": 0.9323, + "step": 5062 + }, + { + "epoch": 0.85, + "learning_rate": 9.506103313773987e-06, + "loss": 0.9257, + "step": 5063 + }, + { + "epoch": 0.85, + "learning_rate": 9.5006803681626e-06, + "loss": 0.8909, + "step": 5064 + }, + { + "epoch": 0.85, + "learning_rate": 9.49525756975586e-06, + "loss": 0.8815, + "step": 5065 + }, + { + "epoch": 0.85, + "learning_rate": 9.489834920152458e-06, + "loss": 0.8907, + "step": 5066 + }, + { + "epoch": 0.85, + "learning_rate": 9.48441242095106e-06, + "loss": 0.8961, + "step": 5067 + }, + { + "epoch": 0.85, + "learning_rate": 9.478990073750267e-06, + "loss": 0.9271, + "step": 5068 + }, + { + "epoch": 0.85, + "learning_rate": 9.473567880148647e-06, + "loss": 0.9397, + "step": 5069 + }, + { + "epoch": 0.85, + "learning_rate": 9.468145841744717e-06, + "loss": 0.9102, + "step": 5070 + }, + { + "epoch": 0.85, + "learning_rate": 9.462723960136953e-06, + "loss": 0.8694, + "step": 5071 + }, + { + "epoch": 0.85, + "learning_rate": 9.45730223692378e-06, + "loss": 0.8979, + "step": 5072 + }, + { + "epoch": 0.85, + "learning_rate": 9.45188067370358e-06, + "loss": 0.8972, + "step": 5073 + }, + { + "epoch": 0.85, + "learning_rate": 9.44645927207468e-06, + "loss": 0.9378, + "step": 5074 + }, + { + "epoch": 0.85, + "learning_rate": 9.441038033635379e-06, + "loss": 0.9362, + "step": 5075 + }, + { + "epoch": 0.85, + "learning_rate": 9.435616959983905e-06, + "loss": 
0.923, + "step": 5076 + }, + { + "epoch": 0.85, + "learning_rate": 9.430196052718454e-06, + "loss": 0.9458, + "step": 5077 + }, + { + "epoch": 0.85, + "learning_rate": 9.424775313437158e-06, + "loss": 0.9343, + "step": 5078 + }, + { + "epoch": 0.85, + "learning_rate": 9.419354743738114e-06, + "loss": 0.3656, + "step": 5079 + }, + { + "epoch": 0.85, + "learning_rate": 9.413934345219358e-06, + "loss": 0.9027, + "step": 5080 + }, + { + "epoch": 0.85, + "learning_rate": 9.408514119478884e-06, + "loss": 0.9027, + "step": 5081 + }, + { + "epoch": 0.85, + "learning_rate": 9.403094068114627e-06, + "loss": 0.8767, + "step": 5082 + }, + { + "epoch": 0.85, + "learning_rate": 9.397674192724479e-06, + "loss": 0.8266, + "step": 5083 + }, + { + "epoch": 0.85, + "learning_rate": 9.392254494906276e-06, + "loss": 0.9044, + "step": 5084 + }, + { + "epoch": 0.85, + "learning_rate": 9.386834976257797e-06, + "loss": 0.8844, + "step": 5085 + }, + { + "epoch": 0.85, + "learning_rate": 9.381415638376778e-06, + "loss": 0.8284, + "step": 5086 + }, + { + "epoch": 0.85, + "learning_rate": 9.375996482860893e-06, + "loss": 0.9313, + "step": 5087 + }, + { + "epoch": 0.85, + "learning_rate": 9.370577511307766e-06, + "loss": 0.9256, + "step": 5088 + }, + { + "epoch": 0.85, + "learning_rate": 9.365158725314968e-06, + "loss": 0.9128, + "step": 5089 + }, + { + "epoch": 0.85, + "learning_rate": 9.35974012648001e-06, + "loss": 0.9432, + "step": 5090 + }, + { + "epoch": 0.85, + "learning_rate": 9.354321716400357e-06, + "loss": 0.939, + "step": 5091 + }, + { + "epoch": 0.85, + "learning_rate": 9.348903496673413e-06, + "loss": 0.9008, + "step": 5092 + }, + { + "epoch": 0.85, + "learning_rate": 9.34348546889652e-06, + "loss": 0.9008, + "step": 5093 + }, + { + "epoch": 0.85, + "learning_rate": 9.338067634666975e-06, + "loss": 0.9168, + "step": 5094 + }, + { + "epoch": 0.85, + "learning_rate": 9.332649995582008e-06, + "loss": 0.8707, + "step": 5095 + }, + { + "epoch": 0.85, + "learning_rate": 9.327232553238797e-06, + "loss": 0.8893, + "step": 5096 + }, + { + "epoch": 0.85, + "learning_rate": 9.32181530923446e-06, + "loss": 0.8961, + "step": 5097 + }, + { + "epoch": 0.85, + "learning_rate": 9.316398265166057e-06, + "loss": 0.9081, + "step": 5098 + }, + { + "epoch": 0.85, + "learning_rate": 9.310981422630592e-06, + "loss": 0.8708, + "step": 5099 + }, + { + "epoch": 0.85, + "learning_rate": 9.305564783225006e-06, + "loss": 0.9462, + "step": 5100 + }, + { + "epoch": 0.86, + "learning_rate": 9.30014834854618e-06, + "loss": 0.8975, + "step": 5101 + }, + { + "epoch": 0.86, + "learning_rate": 9.294732120190935e-06, + "loss": 0.8462, + "step": 5102 + }, + { + "epoch": 0.86, + "learning_rate": 9.289316099756029e-06, + "loss": 0.8617, + "step": 5103 + }, + { + "epoch": 0.86, + "learning_rate": 9.28390028883817e-06, + "loss": 0.9136, + "step": 5104 + }, + { + "epoch": 0.86, + "learning_rate": 9.278484689033986e-06, + "loss": 0.9041, + "step": 5105 + }, + { + "epoch": 0.86, + "learning_rate": 9.273069301940059e-06, + "loss": 0.9002, + "step": 5106 + }, + { + "epoch": 0.86, + "learning_rate": 9.267654129152897e-06, + "loss": 0.8738, + "step": 5107 + }, + { + "epoch": 0.86, + "learning_rate": 9.262239172268952e-06, + "loss": 0.9652, + "step": 5108 + }, + { + "epoch": 0.86, + "learning_rate": 9.256824432884614e-06, + "loss": 0.8844, + "step": 5109 + }, + { + "epoch": 0.86, + "learning_rate": 9.251409912596196e-06, + "loss": 0.8801, + "step": 5110 + }, + { + "epoch": 0.86, + "learning_rate": 9.245995612999964e-06, + "loss": 0.8549, + "step": 5111 + 
}, + { + "epoch": 0.86, + "learning_rate": 9.240581535692102e-06, + "loss": 0.8946, + "step": 5112 + }, + { + "epoch": 0.86, + "learning_rate": 9.235167682268743e-06, + "loss": 0.8641, + "step": 5113 + }, + { + "epoch": 0.86, + "learning_rate": 9.229754054325944e-06, + "loss": 0.8584, + "step": 5114 + }, + { + "epoch": 0.86, + "learning_rate": 9.224340653459698e-06, + "loss": 0.8457, + "step": 5115 + }, + { + "epoch": 0.86, + "learning_rate": 9.218927481265936e-06, + "loss": 0.3343, + "step": 5116 + }, + { + "epoch": 0.86, + "learning_rate": 9.213514539340517e-06, + "loss": 0.9106, + "step": 5117 + }, + { + "epoch": 0.86, + "learning_rate": 9.20810182927923e-06, + "loss": 0.8851, + "step": 5118 + }, + { + "epoch": 0.86, + "learning_rate": 9.202689352677805e-06, + "loss": 0.8885, + "step": 5119 + }, + { + "epoch": 0.86, + "learning_rate": 9.197277111131888e-06, + "loss": 0.8905, + "step": 5120 + }, + { + "epoch": 0.86, + "learning_rate": 9.19186510623707e-06, + "loss": 0.8484, + "step": 5121 + }, + { + "epoch": 0.86, + "learning_rate": 9.186453339588862e-06, + "loss": 0.8667, + "step": 5122 + }, + { + "epoch": 0.86, + "learning_rate": 9.181041812782712e-06, + "loss": 0.8717, + "step": 5123 + }, + { + "epoch": 0.86, + "learning_rate": 9.175630527413999e-06, + "loss": 0.911, + "step": 5124 + }, + { + "epoch": 0.86, + "learning_rate": 9.17021948507802e-06, + "loss": 0.8561, + "step": 5125 + }, + { + "epoch": 0.86, + "learning_rate": 9.164808687370011e-06, + "loss": 0.8833, + "step": 5126 + }, + { + "epoch": 0.86, + "learning_rate": 9.15939813588513e-06, + "loss": 0.8921, + "step": 5127 + }, + { + "epoch": 0.86, + "learning_rate": 9.153987832218463e-06, + "loss": 0.9465, + "step": 5128 + }, + { + "epoch": 0.86, + "learning_rate": 9.148577777965028e-06, + "loss": 0.8999, + "step": 5129 + }, + { + "epoch": 0.86, + "learning_rate": 9.14316797471976e-06, + "loss": 0.9338, + "step": 5130 + }, + { + "epoch": 0.86, + "learning_rate": 9.137758424077525e-06, + "loss": 0.891, + "step": 5131 + }, + { + "epoch": 0.86, + "learning_rate": 9.132349127633125e-06, + "loss": 0.8992, + "step": 5132 + }, + { + "epoch": 0.86, + "learning_rate": 9.126940086981268e-06, + "loss": 0.9176, + "step": 5133 + }, + { + "epoch": 0.86, + "learning_rate": 9.121531303716598e-06, + "loss": 0.8493, + "step": 5134 + }, + { + "epoch": 0.86, + "learning_rate": 9.116122779433681e-06, + "loss": 0.8325, + "step": 5135 + }, + { + "epoch": 0.86, + "learning_rate": 9.11071451572701e-06, + "loss": 0.9265, + "step": 5136 + }, + { + "epoch": 0.86, + "learning_rate": 9.10530651419099e-06, + "loss": 0.8466, + "step": 5137 + }, + { + "epoch": 0.86, + "learning_rate": 9.099898776419963e-06, + "loss": 0.8691, + "step": 5138 + }, + { + "epoch": 0.86, + "learning_rate": 9.094491304008178e-06, + "loss": 0.8921, + "step": 5139 + }, + { + "epoch": 0.86, + "learning_rate": 9.089084098549827e-06, + "loss": 0.8928, + "step": 5140 + }, + { + "epoch": 0.86, + "learning_rate": 9.083677161639002e-06, + "loss": 0.9, + "step": 5141 + }, + { + "epoch": 0.86, + "learning_rate": 9.078270494869732e-06, + "loss": 0.8727, + "step": 5142 + }, + { + "epoch": 0.86, + "learning_rate": 9.072864099835947e-06, + "loss": 0.9055, + "step": 5143 + }, + { + "epoch": 0.86, + "learning_rate": 9.06745797813152e-06, + "loss": 0.869, + "step": 5144 + }, + { + "epoch": 0.86, + "learning_rate": 9.062052131350225e-06, + "loss": 0.9121, + "step": 5145 + }, + { + "epoch": 0.86, + "learning_rate": 9.056646561085766e-06, + "loss": 0.8972, + "step": 5146 + }, + { + "epoch": 0.86, + 
"learning_rate": 9.051241268931759e-06, + "loss": 0.8908, + "step": 5147 + }, + { + "epoch": 0.86, + "learning_rate": 9.045836256481742e-06, + "loss": 0.8877, + "step": 5148 + }, + { + "epoch": 0.86, + "learning_rate": 9.04043152532917e-06, + "loss": 0.8853, + "step": 5149 + }, + { + "epoch": 0.86, + "learning_rate": 9.035027077067413e-06, + "loss": 0.8771, + "step": 5150 + }, + { + "epoch": 0.86, + "learning_rate": 9.02962291328976e-06, + "loss": 0.4086, + "step": 5151 + }, + { + "epoch": 0.86, + "learning_rate": 9.024219035589415e-06, + "loss": 0.8979, + "step": 5152 + }, + { + "epoch": 0.86, + "learning_rate": 9.018815445559495e-06, + "loss": 0.9253, + "step": 5153 + }, + { + "epoch": 0.86, + "learning_rate": 9.013412144793039e-06, + "loss": 0.4001, + "step": 5154 + }, + { + "epoch": 0.86, + "learning_rate": 9.008009134882989e-06, + "loss": 0.8968, + "step": 5155 + }, + { + "epoch": 0.86, + "learning_rate": 9.002606417422218e-06, + "loss": 0.8646, + "step": 5156 + }, + { + "epoch": 0.86, + "learning_rate": 8.9972039940035e-06, + "loss": 0.9054, + "step": 5157 + }, + { + "epoch": 0.86, + "learning_rate": 8.991801866219523e-06, + "loss": 0.859, + "step": 5158 + }, + { + "epoch": 0.86, + "learning_rate": 8.986400035662897e-06, + "loss": 0.8592, + "step": 5159 + }, + { + "epoch": 0.87, + "learning_rate": 8.98099850392613e-06, + "loss": 0.9138, + "step": 5160 + }, + { + "epoch": 0.87, + "learning_rate": 8.975597272601658e-06, + "loss": 0.8706, + "step": 5161 + }, + { + "epoch": 0.87, + "learning_rate": 8.970196343281815e-06, + "loss": 0.9194, + "step": 5162 + }, + { + "epoch": 0.87, + "learning_rate": 8.96479571755885e-06, + "loss": 0.3968, + "step": 5163 + }, + { + "epoch": 0.87, + "learning_rate": 8.959395397024928e-06, + "loss": 0.8306, + "step": 5164 + }, + { + "epoch": 0.87, + "learning_rate": 8.953995383272123e-06, + "loss": 0.8641, + "step": 5165 + }, + { + "epoch": 0.87, + "learning_rate": 8.948595677892408e-06, + "loss": 0.8506, + "step": 5166 + }, + { + "epoch": 0.87, + "learning_rate": 8.94319628247768e-06, + "loss": 0.9153, + "step": 5167 + }, + { + "epoch": 0.87, + "learning_rate": 8.93779719861973e-06, + "loss": 0.8656, + "step": 5168 + }, + { + "epoch": 0.87, + "learning_rate": 8.93239842791027e-06, + "loss": 0.8794, + "step": 5169 + }, + { + "epoch": 0.87, + "learning_rate": 8.92699997194091e-06, + "loss": 0.8942, + "step": 5170 + }, + { + "epoch": 0.87, + "learning_rate": 8.921601832303174e-06, + "loss": 0.8583, + "step": 5171 + }, + { + "epoch": 0.87, + "learning_rate": 8.916204010588483e-06, + "loss": 0.9031, + "step": 5172 + }, + { + "epoch": 0.87, + "learning_rate": 8.910806508388183e-06, + "loss": 0.8971, + "step": 5173 + }, + { + "epoch": 0.87, + "learning_rate": 8.905409327293508e-06, + "loss": 0.8944, + "step": 5174 + }, + { + "epoch": 0.87, + "learning_rate": 8.900012468895602e-06, + "loss": 0.9024, + "step": 5175 + }, + { + "epoch": 0.87, + "learning_rate": 8.894615934785519e-06, + "loss": 0.8666, + "step": 5176 + }, + { + "epoch": 0.87, + "learning_rate": 8.88921972655421e-06, + "loss": 0.864, + "step": 5177 + }, + { + "epoch": 0.87, + "learning_rate": 8.883823845792534e-06, + "loss": 0.8403, + "step": 5178 + }, + { + "epoch": 0.87, + "learning_rate": 8.878428294091257e-06, + "loss": 0.8602, + "step": 5179 + }, + { + "epoch": 0.87, + "learning_rate": 8.873033073041037e-06, + "loss": 0.9309, + "step": 5180 + }, + { + "epoch": 0.87, + "learning_rate": 8.867638184232446e-06, + "loss": 0.8742, + "step": 5181 + }, + { + "epoch": 0.87, + "learning_rate": 
8.862243629255957e-06, + "loss": 0.9297, + "step": 5182 + }, + { + "epoch": 0.87, + "learning_rate": 8.856849409701936e-06, + "loss": 0.8421, + "step": 5183 + }, + { + "epoch": 0.87, + "learning_rate": 8.851455527160658e-06, + "loss": 0.9038, + "step": 5184 + }, + { + "epoch": 0.87, + "learning_rate": 8.846061983222293e-06, + "loss": 0.9235, + "step": 5185 + }, + { + "epoch": 0.87, + "learning_rate": 8.840668779476917e-06, + "loss": 0.9555, + "step": 5186 + }, + { + "epoch": 0.87, + "learning_rate": 8.835275917514501e-06, + "loss": 0.8577, + "step": 5187 + }, + { + "epoch": 0.87, + "learning_rate": 8.829883398924915e-06, + "loss": 0.8895, + "step": 5188 + }, + { + "epoch": 0.87, + "learning_rate": 8.824491225297935e-06, + "loss": 0.8828, + "step": 5189 + }, + { + "epoch": 0.87, + "learning_rate": 8.81909939822323e-06, + "loss": 0.8685, + "step": 5190 + }, + { + "epoch": 0.87, + "learning_rate": 8.813707919290362e-06, + "loss": 0.8509, + "step": 5191 + }, + { + "epoch": 0.87, + "learning_rate": 8.808316790088801e-06, + "loss": 0.9376, + "step": 5192 + }, + { + "epoch": 0.87, + "learning_rate": 8.802926012207902e-06, + "loss": 0.9086, + "step": 5193 + }, + { + "epoch": 0.87, + "learning_rate": 8.79753558723693e-06, + "loss": 0.8415, + "step": 5194 + }, + { + "epoch": 0.87, + "learning_rate": 8.792145516765031e-06, + "loss": 0.8971, + "step": 5195 + }, + { + "epoch": 0.87, + "learning_rate": 8.786755802381255e-06, + "loss": 0.8252, + "step": 5196 + }, + { + "epoch": 0.87, + "learning_rate": 8.781366445674555e-06, + "loss": 0.8676, + "step": 5197 + }, + { + "epoch": 0.87, + "learning_rate": 8.77597744823376e-06, + "loss": 0.8589, + "step": 5198 + }, + { + "epoch": 0.87, + "learning_rate": 8.77058881164761e-06, + "loss": 0.8617, + "step": 5199 + }, + { + "epoch": 0.87, + "learning_rate": 8.765200537504725e-06, + "loss": 0.8715, + "step": 5200 + }, + { + "epoch": 0.87, + "learning_rate": 8.759812627393628e-06, + "loss": 0.962, + "step": 5201 + }, + { + "epoch": 0.87, + "learning_rate": 8.754425082902728e-06, + "loss": 0.955, + "step": 5202 + }, + { + "epoch": 0.87, + "learning_rate": 8.749037905620334e-06, + "loss": 0.8924, + "step": 5203 + }, + { + "epoch": 0.87, + "learning_rate": 8.743651097134637e-06, + "loss": 0.8201, + "step": 5204 + }, + { + "epoch": 0.87, + "learning_rate": 8.738264659033731e-06, + "loss": 0.8991, + "step": 5205 + }, + { + "epoch": 0.87, + "learning_rate": 8.73287859290559e-06, + "loss": 0.9271, + "step": 5206 + }, + { + "epoch": 0.87, + "learning_rate": 8.727492900338083e-06, + "loss": 0.9123, + "step": 5207 + }, + { + "epoch": 0.87, + "learning_rate": 8.72210758291897e-06, + "loss": 0.8997, + "step": 5208 + }, + { + "epoch": 0.87, + "learning_rate": 8.716722642235897e-06, + "loss": 0.8751, + "step": 5209 + }, + { + "epoch": 0.87, + "learning_rate": 8.711338079876401e-06, + "loss": 0.8737, + "step": 5210 + }, + { + "epoch": 0.87, + "learning_rate": 8.705953897427908e-06, + "loss": 0.367, + "step": 5211 + }, + { + "epoch": 0.87, + "learning_rate": 8.70057009647773e-06, + "loss": 0.8668, + "step": 5212 + }, + { + "epoch": 0.87, + "learning_rate": 8.695186678613068e-06, + "loss": 0.8296, + "step": 5213 + }, + { + "epoch": 0.87, + "learning_rate": 8.68980364542101e-06, + "loss": 0.7355, + "step": 5214 + }, + { + "epoch": 0.87, + "learning_rate": 8.684420998488536e-06, + "loss": 0.864, + "step": 5215 + }, + { + "epoch": 0.87, + "learning_rate": 8.679038739402497e-06, + "loss": 0.9209, + "step": 5216 + }, + { + "epoch": 0.87, + "learning_rate": 8.673656869749647e-06, + 
"loss": 0.8658, + "step": 5217 + }, + { + "epoch": 0.87, + "learning_rate": 8.668275391116613e-06, + "loss": 0.8781, + "step": 5218 + }, + { + "epoch": 0.87, + "learning_rate": 8.662894305089912e-06, + "loss": 0.8744, + "step": 5219 + }, + { + "epoch": 0.88, + "learning_rate": 8.657513613255942e-06, + "loss": 0.9521, + "step": 5220 + }, + { + "epoch": 0.88, + "learning_rate": 8.652133317200993e-06, + "loss": 0.9178, + "step": 5221 + }, + { + "epoch": 0.88, + "learning_rate": 8.646753418511232e-06, + "loss": 0.8411, + "step": 5222 + }, + { + "epoch": 0.88, + "learning_rate": 8.641373918772702e-06, + "loss": 0.8826, + "step": 5223 + }, + { + "epoch": 0.88, + "learning_rate": 8.635994819571345e-06, + "loss": 0.8913, + "step": 5224 + }, + { + "epoch": 0.88, + "learning_rate": 8.630616122492967e-06, + "loss": 0.8763, + "step": 5225 + }, + { + "epoch": 0.88, + "learning_rate": 8.625237829123274e-06, + "loss": 0.94, + "step": 5226 + }, + { + "epoch": 0.88, + "learning_rate": 8.619859941047832e-06, + "loss": 0.8952, + "step": 5227 + }, + { + "epoch": 0.88, + "learning_rate": 8.614482459852104e-06, + "loss": 0.8718, + "step": 5228 + }, + { + "epoch": 0.88, + "learning_rate": 8.609105387121429e-06, + "loss": 0.9343, + "step": 5229 + }, + { + "epoch": 0.88, + "learning_rate": 8.603728724441025e-06, + "loss": 0.9447, + "step": 5230 + }, + { + "epoch": 0.88, + "learning_rate": 8.598352473395985e-06, + "loss": 0.8317, + "step": 5231 + }, + { + "epoch": 0.88, + "learning_rate": 8.592976635571287e-06, + "loss": 0.8757, + "step": 5232 + }, + { + "epoch": 0.88, + "learning_rate": 8.587601212551781e-06, + "loss": 0.9226, + "step": 5233 + }, + { + "epoch": 0.88, + "learning_rate": 8.5822262059222e-06, + "loss": 0.8494, + "step": 5234 + }, + { + "epoch": 0.88, + "learning_rate": 8.576851617267151e-06, + "loss": 0.8202, + "step": 5235 + }, + { + "epoch": 0.88, + "learning_rate": 8.57147744817112e-06, + "loss": 0.9269, + "step": 5236 + }, + { + "epoch": 0.88, + "learning_rate": 8.566103700218465e-06, + "loss": 0.8688, + "step": 5237 + }, + { + "epoch": 0.88, + "learning_rate": 8.560730374993429e-06, + "loss": 0.8646, + "step": 5238 + }, + { + "epoch": 0.88, + "learning_rate": 8.55535747408012e-06, + "loss": 0.8907, + "step": 5239 + }, + { + "epoch": 0.88, + "learning_rate": 8.549984999062526e-06, + "loss": 0.9021, + "step": 5240 + }, + { + "epoch": 0.88, + "learning_rate": 8.54461295152451e-06, + "loss": 0.8291, + "step": 5241 + }, + { + "epoch": 0.88, + "learning_rate": 8.539241333049807e-06, + "loss": 0.9346, + "step": 5242 + }, + { + "epoch": 0.88, + "learning_rate": 8.533870145222028e-06, + "loss": 0.8749, + "step": 5243 + }, + { + "epoch": 0.88, + "learning_rate": 8.528499389624653e-06, + "loss": 0.8409, + "step": 5244 + }, + { + "epoch": 0.88, + "learning_rate": 8.523129067841033e-06, + "loss": 0.8676, + "step": 5245 + }, + { + "epoch": 0.88, + "learning_rate": 8.517759181454403e-06, + "loss": 0.9301, + "step": 5246 + }, + { + "epoch": 0.88, + "learning_rate": 8.512389732047859e-06, + "loss": 0.8867, + "step": 5247 + }, + { + "epoch": 0.88, + "learning_rate": 8.507020721204368e-06, + "loss": 0.9164, + "step": 5248 + }, + { + "epoch": 0.88, + "learning_rate": 8.501652150506776e-06, + "loss": 0.8633, + "step": 5249 + }, + { + "epoch": 0.88, + "learning_rate": 8.496284021537788e-06, + "loss": 0.8803, + "step": 5250 + }, + { + "epoch": 0.88, + "learning_rate": 8.490916335879988e-06, + "loss": 0.8447, + "step": 5251 + }, + { + "epoch": 0.88, + "learning_rate": 8.485549095115821e-06, + "loss": 0.8412, + "step": 
5252 + }, + { + "epoch": 0.88, + "learning_rate": 8.48018230082761e-06, + "loss": 0.8996, + "step": 5253 + }, + { + "epoch": 0.88, + "learning_rate": 8.474815954597541e-06, + "loss": 0.8497, + "step": 5254 + }, + { + "epoch": 0.88, + "learning_rate": 8.469450058007673e-06, + "loss": 0.9213, + "step": 5255 + }, + { + "epoch": 0.88, + "learning_rate": 8.464084612639922e-06, + "loss": 0.8492, + "step": 5256 + }, + { + "epoch": 0.88, + "learning_rate": 8.458719620076083e-06, + "loss": 0.8508, + "step": 5257 + }, + { + "epoch": 0.88, + "learning_rate": 8.453355081897804e-06, + "loss": 0.9272, + "step": 5258 + }, + { + "epoch": 0.88, + "learning_rate": 8.447990999686618e-06, + "loss": 0.8768, + "step": 5259 + }, + { + "epoch": 0.88, + "learning_rate": 8.442627375023902e-06, + "loss": 0.877, + "step": 5260 + }, + { + "epoch": 0.88, + "learning_rate": 8.43726420949091e-06, + "loss": 0.9534, + "step": 5261 + }, + { + "epoch": 0.88, + "learning_rate": 8.431901504668766e-06, + "loss": 0.8909, + "step": 5262 + }, + { + "epoch": 0.88, + "learning_rate": 8.426539262138448e-06, + "loss": 0.8701, + "step": 5263 + }, + { + "epoch": 0.88, + "learning_rate": 8.421177483480803e-06, + "loss": 0.8666, + "step": 5264 + }, + { + "epoch": 0.88, + "learning_rate": 8.415816170276533e-06, + "loss": 0.8215, + "step": 5265 + }, + { + "epoch": 0.88, + "learning_rate": 8.410455324106214e-06, + "loss": 0.9253, + "step": 5266 + }, + { + "epoch": 0.88, + "learning_rate": 8.405094946550281e-06, + "loss": 0.8448, + "step": 5267 + }, + { + "epoch": 0.88, + "learning_rate": 8.399735039189026e-06, + "loss": 0.8743, + "step": 5268 + }, + { + "epoch": 0.88, + "learning_rate": 8.394375603602602e-06, + "loss": 0.9085, + "step": 5269 + }, + { + "epoch": 0.88, + "learning_rate": 8.389016641371038e-06, + "loss": 0.3927, + "step": 5270 + }, + { + "epoch": 0.88, + "learning_rate": 8.383658154074203e-06, + "loss": 0.8316, + "step": 5271 + }, + { + "epoch": 0.88, + "learning_rate": 8.37830014329184e-06, + "loss": 0.8733, + "step": 5272 + }, + { + "epoch": 0.88, + "learning_rate": 8.372942610603542e-06, + "loss": 0.8807, + "step": 5273 + }, + { + "epoch": 0.88, + "learning_rate": 8.367585557588769e-06, + "loss": 0.8614, + "step": 5274 + }, + { + "epoch": 0.88, + "learning_rate": 8.362228985826834e-06, + "loss": 0.9227, + "step": 5275 + }, + { + "epoch": 0.88, + "learning_rate": 8.356872896896913e-06, + "loss": 0.8956, + "step": 5276 + }, + { + "epoch": 0.88, + "learning_rate": 8.351517292378031e-06, + "loss": 0.9079, + "step": 5277 + }, + { + "epoch": 0.88, + "learning_rate": 8.346162173849082e-06, + "loss": 0.8977, + "step": 5278 + }, + { + "epoch": 0.88, + "learning_rate": 8.34080754288881e-06, + "loss": 0.8618, + "step": 5279 + }, + { + "epoch": 0.89, + "learning_rate": 8.335453401075818e-06, + "loss": 0.9034, + "step": 5280 + }, + { + "epoch": 0.89, + "learning_rate": 8.330099749988558e-06, + "loss": 0.9036, + "step": 5281 + }, + { + "epoch": 0.89, + "learning_rate": 8.324746591205349e-06, + "loss": 0.8581, + "step": 5282 + }, + { + "epoch": 0.89, + "learning_rate": 8.31939392630435e-06, + "loss": 0.3378, + "step": 5283 + }, + { + "epoch": 0.89, + "learning_rate": 8.314041756863589e-06, + "loss": 0.7986, + "step": 5284 + }, + { + "epoch": 0.89, + "learning_rate": 8.308690084460935e-06, + "loss": 0.9375, + "step": 5285 + }, + { + "epoch": 0.89, + "learning_rate": 8.303338910674124e-06, + "loss": 0.8382, + "step": 5286 + }, + { + "epoch": 0.89, + "learning_rate": 8.297988237080735e-06, + "loss": 0.9749, + "step": 5287 + }, + { + "epoch": 
0.89, + "learning_rate": 8.292638065258203e-06, + "loss": 0.8621, + "step": 5288 + }, + { + "epoch": 0.89, + "learning_rate": 8.287288396783814e-06, + "loss": 0.9283, + "step": 5289 + }, + { + "epoch": 0.89, + "learning_rate": 8.281939233234706e-06, + "loss": 0.8911, + "step": 5290 + }, + { + "epoch": 0.89, + "learning_rate": 8.27659057618787e-06, + "loss": 0.9478, + "step": 5291 + }, + { + "epoch": 0.89, + "learning_rate": 8.27124242722014e-06, + "loss": 0.8585, + "step": 5292 + }, + { + "epoch": 0.89, + "learning_rate": 8.265894787908213e-06, + "loss": 0.8727, + "step": 5293 + }, + { + "epoch": 0.89, + "learning_rate": 8.260547659828625e-06, + "loss": 0.8992, + "step": 5294 + }, + { + "epoch": 0.89, + "learning_rate": 8.255201044557773e-06, + "loss": 0.8768, + "step": 5295 + }, + { + "epoch": 0.89, + "learning_rate": 8.249854943671884e-06, + "loss": 0.941, + "step": 5296 + }, + { + "epoch": 0.89, + "learning_rate": 8.244509358747052e-06, + "loss": 0.8202, + "step": 5297 + }, + { + "epoch": 0.89, + "learning_rate": 8.239164291359207e-06, + "loss": 0.8993, + "step": 5298 + }, + { + "epoch": 0.89, + "learning_rate": 8.233819743084137e-06, + "loss": 0.8807, + "step": 5299 + }, + { + "epoch": 0.89, + "learning_rate": 8.228475715497461e-06, + "loss": 0.9349, + "step": 5300 + }, + { + "epoch": 0.89, + "learning_rate": 8.223132210174663e-06, + "loss": 0.8929, + "step": 5301 + }, + { + "epoch": 0.89, + "learning_rate": 8.217789228691055e-06, + "loss": 0.8899, + "step": 5302 + }, + { + "epoch": 0.89, + "learning_rate": 8.212446772621813e-06, + "loss": 1.0142, + "step": 5303 + }, + { + "epoch": 0.89, + "learning_rate": 8.207104843541944e-06, + "loss": 0.8686, + "step": 5304 + }, + { + "epoch": 0.89, + "learning_rate": 8.20176344302631e-06, + "loss": 0.9092, + "step": 5305 + }, + { + "epoch": 0.89, + "learning_rate": 8.196422572649602e-06, + "loss": 0.8584, + "step": 5306 + }, + { + "epoch": 0.89, + "learning_rate": 8.191082233986375e-06, + "loss": 0.3634, + "step": 5307 + }, + { + "epoch": 0.89, + "learning_rate": 8.18574242861101e-06, + "loss": 0.8787, + "step": 5308 + }, + { + "epoch": 0.89, + "learning_rate": 8.180403158097738e-06, + "loss": 0.8718, + "step": 5309 + }, + { + "epoch": 0.89, + "learning_rate": 8.17506442402063e-06, + "loss": 0.8987, + "step": 5310 + }, + { + "epoch": 0.89, + "learning_rate": 8.169726227953605e-06, + "loss": 0.8664, + "step": 5311 + }, + { + "epoch": 0.89, + "learning_rate": 8.16438857147042e-06, + "loss": 0.8728, + "step": 5312 + }, + { + "epoch": 0.89, + "learning_rate": 8.15905145614467e-06, + "loss": 0.9023, + "step": 5313 + }, + { + "epoch": 0.89, + "learning_rate": 8.153714883549789e-06, + "loss": 0.805, + "step": 5314 + }, + { + "epoch": 0.89, + "learning_rate": 8.148378855259056e-06, + "loss": 0.8865, + "step": 5315 + }, + { + "epoch": 0.89, + "learning_rate": 8.143043372845593e-06, + "loss": 0.3529, + "step": 5316 + }, + { + "epoch": 0.89, + "learning_rate": 8.13770843788235e-06, + "loss": 0.8822, + "step": 5317 + }, + { + "epoch": 0.89, + "learning_rate": 8.13237405194212e-06, + "loss": 0.9017, + "step": 5318 + }, + { + "epoch": 0.89, + "learning_rate": 8.12704021659754e-06, + "loss": 0.8721, + "step": 5319 + }, + { + "epoch": 0.89, + "learning_rate": 8.121706933421084e-06, + "loss": 0.8697, + "step": 5320 + }, + { + "epoch": 0.89, + "learning_rate": 8.116374203985054e-06, + "loss": 0.9285, + "step": 5321 + }, + { + "epoch": 0.89, + "learning_rate": 8.111042029861595e-06, + "loss": 0.8626, + "step": 5322 + }, + { + "epoch": 0.89, + "learning_rate": 
8.105710412622686e-06, + "loss": 0.3665, + "step": 5323 + }, + { + "epoch": 0.89, + "learning_rate": 8.10037935384015e-06, + "loss": 0.3747, + "step": 5324 + }, + { + "epoch": 0.89, + "learning_rate": 8.095048855085633e-06, + "loss": 0.8964, + "step": 5325 + }, + { + "epoch": 0.89, + "learning_rate": 8.08971891793062e-06, + "loss": 0.8367, + "step": 5326 + }, + { + "epoch": 0.89, + "learning_rate": 8.08438954394644e-06, + "loss": 0.9298, + "step": 5327 + }, + { + "epoch": 0.89, + "learning_rate": 8.079060734704241e-06, + "loss": 0.8759, + "step": 5328 + }, + { + "epoch": 0.89, + "learning_rate": 8.073732491775015e-06, + "loss": 0.9289, + "step": 5329 + }, + { + "epoch": 0.89, + "learning_rate": 8.068404816729584e-06, + "loss": 0.8623, + "step": 5330 + }, + { + "epoch": 0.89, + "learning_rate": 8.063077711138601e-06, + "loss": 0.8686, + "step": 5331 + }, + { + "epoch": 0.89, + "learning_rate": 8.057751176572555e-06, + "loss": 0.9178, + "step": 5332 + }, + { + "epoch": 0.89, + "learning_rate": 8.05242521460176e-06, + "loss": 0.3408, + "step": 5333 + }, + { + "epoch": 0.89, + "learning_rate": 8.047099826796363e-06, + "loss": 0.8558, + "step": 5334 + }, + { + "epoch": 0.89, + "learning_rate": 8.041775014726354e-06, + "loss": 0.9252, + "step": 5335 + }, + { + "epoch": 0.89, + "learning_rate": 8.036450779961533e-06, + "loss": 0.8631, + "step": 5336 + }, + { + "epoch": 0.89, + "learning_rate": 8.031127124071549e-06, + "loss": 0.3767, + "step": 5337 + }, + { + "epoch": 0.89, + "learning_rate": 8.025804048625862e-06, + "loss": 0.893, + "step": 5338 + }, + { + "epoch": 0.9, + "learning_rate": 8.02048155519378e-06, + "loss": 0.9175, + "step": 5339 + }, + { + "epoch": 0.9, + "learning_rate": 8.015159645344421e-06, + "loss": 0.8482, + "step": 5340 + }, + { + "epoch": 0.9, + "learning_rate": 8.009838320646746e-06, + "loss": 0.8825, + "step": 5341 + }, + { + "epoch": 0.9, + "learning_rate": 8.004517582669527e-06, + "loss": 0.8882, + "step": 5342 + }, + { + "epoch": 0.9, + "learning_rate": 7.999197432981389e-06, + "loss": 0.9107, + "step": 5343 + }, + { + "epoch": 0.9, + "learning_rate": 7.993877873150756e-06, + "loss": 0.8453, + "step": 5344 + }, + { + "epoch": 0.9, + "learning_rate": 7.988558904745895e-06, + "loss": 0.9244, + "step": 5345 + }, + { + "epoch": 0.9, + "learning_rate": 7.983240529334891e-06, + "loss": 0.8672, + "step": 5346 + }, + { + "epoch": 0.9, + "learning_rate": 7.977922748485658e-06, + "loss": 0.8354, + "step": 5347 + }, + { + "epoch": 0.9, + "learning_rate": 7.972605563765933e-06, + "loss": 0.8239, + "step": 5348 + }, + { + "epoch": 0.9, + "learning_rate": 7.967288976743276e-06, + "loss": 0.855, + "step": 5349 + }, + { + "epoch": 0.9, + "learning_rate": 7.961972988985072e-06, + "loss": 0.952, + "step": 5350 + }, + { + "epoch": 0.9, + "learning_rate": 7.956657602058534e-06, + "loss": 0.8862, + "step": 5351 + }, + { + "epoch": 0.9, + "learning_rate": 7.951342817530693e-06, + "loss": 0.9398, + "step": 5352 + }, + { + "epoch": 0.9, + "learning_rate": 7.9460286369684e-06, + "loss": 0.8501, + "step": 5353 + }, + { + "epoch": 0.9, + "learning_rate": 7.940715061938332e-06, + "loss": 0.3672, + "step": 5354 + }, + { + "epoch": 0.9, + "learning_rate": 7.935402094006985e-06, + "loss": 0.8541, + "step": 5355 + }, + { + "epoch": 0.9, + "learning_rate": 7.93008973474068e-06, + "loss": 0.8454, + "step": 5356 + }, + { + "epoch": 0.9, + "learning_rate": 7.924777985705556e-06, + "loss": 0.9319, + "step": 5357 + }, + { + "epoch": 0.9, + "learning_rate": 7.91946684846757e-06, + "loss": 0.8934, + "step": 
5358 + }, + { + "epoch": 0.9, + "learning_rate": 7.914156324592501e-06, + "loss": 0.9132, + "step": 5359 + }, + { + "epoch": 0.9, + "learning_rate": 7.90884641564595e-06, + "loss": 0.8816, + "step": 5360 + }, + { + "epoch": 0.9, + "learning_rate": 7.903537123193332e-06, + "loss": 0.9099, + "step": 5361 + }, + { + "epoch": 0.9, + "learning_rate": 7.898228448799884e-06, + "loss": 0.8667, + "step": 5362 + }, + { + "epoch": 0.9, + "learning_rate": 7.89292039403065e-06, + "loss": 0.8919, + "step": 5363 + }, + { + "epoch": 0.9, + "learning_rate": 7.887612960450513e-06, + "loss": 0.914, + "step": 5364 + }, + { + "epoch": 0.9, + "learning_rate": 7.882306149624149e-06, + "loss": 0.8361, + "step": 5365 + }, + { + "epoch": 0.9, + "learning_rate": 7.876999963116066e-06, + "loss": 0.8037, + "step": 5366 + }, + { + "epoch": 0.9, + "learning_rate": 7.871694402490578e-06, + "loss": 0.9106, + "step": 5367 + }, + { + "epoch": 0.9, + "learning_rate": 7.866389469311831e-06, + "loss": 0.9222, + "step": 5368 + }, + { + "epoch": 0.9, + "learning_rate": 7.861085165143766e-06, + "loss": 0.8617, + "step": 5369 + }, + { + "epoch": 0.9, + "learning_rate": 7.855781491550151e-06, + "loss": 0.9474, + "step": 5370 + }, + { + "epoch": 0.9, + "learning_rate": 7.85047845009456e-06, + "loss": 0.9235, + "step": 5371 + }, + { + "epoch": 0.9, + "learning_rate": 7.845176042340392e-06, + "loss": 0.8924, + "step": 5372 + }, + { + "epoch": 0.9, + "learning_rate": 7.839874269850846e-06, + "loss": 0.864, + "step": 5373 + }, + { + "epoch": 0.9, + "learning_rate": 7.834573134188942e-06, + "loss": 0.8756, + "step": 5374 + }, + { + "epoch": 0.9, + "learning_rate": 7.829272636917509e-06, + "loss": 0.8507, + "step": 5375 + }, + { + "epoch": 0.9, + "learning_rate": 7.82397277959919e-06, + "loss": 0.8857, + "step": 5376 + }, + { + "epoch": 0.9, + "learning_rate": 7.818673563796442e-06, + "loss": 0.8283, + "step": 5377 + }, + { + "epoch": 0.9, + "learning_rate": 7.813374991071524e-06, + "loss": 0.8714, + "step": 5378 + }, + { + "epoch": 0.9, + "learning_rate": 7.808077062986515e-06, + "loss": 0.8623, + "step": 5379 + }, + { + "epoch": 0.9, + "learning_rate": 7.802779781103293e-06, + "loss": 0.9342, + "step": 5380 + }, + { + "epoch": 0.9, + "learning_rate": 7.797483146983557e-06, + "loss": 0.8776, + "step": 5381 + }, + { + "epoch": 0.9, + "learning_rate": 7.79218716218881e-06, + "loss": 0.8924, + "step": 5382 + }, + { + "epoch": 0.9, + "learning_rate": 7.786891828280361e-06, + "loss": 0.9003, + "step": 5383 + }, + { + "epoch": 0.9, + "learning_rate": 7.781597146819331e-06, + "loss": 0.8373, + "step": 5384 + }, + { + "epoch": 0.9, + "learning_rate": 7.776303119366653e-06, + "loss": 0.8916, + "step": 5385 + }, + { + "epoch": 0.9, + "learning_rate": 7.771009747483054e-06, + "loss": 0.9374, + "step": 5386 + }, + { + "epoch": 0.9, + "learning_rate": 7.76571703272908e-06, + "loss": 0.833, + "step": 5387 + }, + { + "epoch": 0.9, + "learning_rate": 7.760424976665074e-06, + "loss": 0.9273, + "step": 5388 + }, + { + "epoch": 0.9, + "learning_rate": 7.755133580851193e-06, + "loss": 0.8692, + "step": 5389 + }, + { + "epoch": 0.9, + "learning_rate": 7.749842846847394e-06, + "loss": 0.8531, + "step": 5390 + }, + { + "epoch": 0.9, + "learning_rate": 7.74455277621344e-06, + "loss": 0.8484, + "step": 5391 + }, + { + "epoch": 0.9, + "learning_rate": 7.739263370508902e-06, + "loss": 0.9214, + "step": 5392 + }, + { + "epoch": 0.9, + "learning_rate": 7.733974631293152e-06, + "loss": 0.8592, + "step": 5393 + }, + { + "epoch": 0.9, + "learning_rate": 
7.728686560125364e-06, + "loss": 0.8326, + "step": 5394 + }, + { + "epoch": 0.9, + "learning_rate": 7.723399158564519e-06, + "loss": 0.8735, + "step": 5395 + }, + { + "epoch": 0.9, + "learning_rate": 7.718112428169394e-06, + "loss": 0.921, + "step": 5396 + }, + { + "epoch": 0.9, + "learning_rate": 7.712826370498577e-06, + "loss": 0.9355, + "step": 5397 + }, + { + "epoch": 0.9, + "learning_rate": 7.707540987110448e-06, + "loss": 0.8233, + "step": 5398 + }, + { + "epoch": 0.91, + "learning_rate": 7.702256279563194e-06, + "loss": 0.9144, + "step": 5399 + }, + { + "epoch": 0.91, + "learning_rate": 7.696972249414807e-06, + "loss": 0.8897, + "step": 5400 + }, + { + "epoch": 0.91, + "learning_rate": 7.69168889822307e-06, + "loss": 0.8731, + "step": 5401 + }, + { + "epoch": 0.91, + "learning_rate": 7.686406227545575e-06, + "loss": 0.8877, + "step": 5402 + }, + { + "epoch": 0.91, + "learning_rate": 7.681124238939701e-06, + "loss": 0.9052, + "step": 5403 + }, + { + "epoch": 0.91, + "learning_rate": 7.675842933962641e-06, + "loss": 0.9524, + "step": 5404 + }, + { + "epoch": 0.91, + "learning_rate": 7.670562314171373e-06, + "loss": 0.8696, + "step": 5405 + }, + { + "epoch": 0.91, + "learning_rate": 7.665282381122681e-06, + "loss": 0.9285, + "step": 5406 + }, + { + "epoch": 0.91, + "learning_rate": 7.66000313637314e-06, + "loss": 0.8992, + "step": 5407 + }, + { + "epoch": 0.91, + "learning_rate": 7.654724581479138e-06, + "loss": 0.9149, + "step": 5408 + }, + { + "epoch": 0.91, + "learning_rate": 7.649446717996838e-06, + "loss": 0.8776, + "step": 5409 + }, + { + "epoch": 0.91, + "learning_rate": 7.644169547482216e-06, + "loss": 0.8927, + "step": 5410 + }, + { + "epoch": 0.91, + "learning_rate": 7.63889307149103e-06, + "loss": 0.8591, + "step": 5411 + }, + { + "epoch": 0.91, + "learning_rate": 7.63361729157885e-06, + "loss": 0.9171, + "step": 5412 + }, + { + "epoch": 0.91, + "learning_rate": 7.62834220930102e-06, + "loss": 0.8637, + "step": 5413 + }, + { + "epoch": 0.91, + "learning_rate": 7.623067826212697e-06, + "loss": 0.8605, + "step": 5414 + }, + { + "epoch": 0.91, + "learning_rate": 7.617794143868817e-06, + "loss": 0.8781, + "step": 5415 + }, + { + "epoch": 0.91, + "learning_rate": 7.612521163824123e-06, + "loss": 0.8993, + "step": 5416 + }, + { + "epoch": 0.91, + "learning_rate": 7.607248887633145e-06, + "loss": 0.9094, + "step": 5417 + }, + { + "epoch": 0.91, + "learning_rate": 7.601977316850202e-06, + "loss": 0.8909, + "step": 5418 + }, + { + "epoch": 0.91, + "learning_rate": 7.59670645302941e-06, + "loss": 0.8706, + "step": 5419 + }, + { + "epoch": 0.91, + "learning_rate": 7.5914362977246745e-06, + "loss": 0.8645, + "step": 5420 + }, + { + "epoch": 0.91, + "learning_rate": 7.58616685248969e-06, + "loss": 0.9612, + "step": 5421 + }, + { + "epoch": 0.91, + "learning_rate": 7.580898118877949e-06, + "loss": 0.8544, + "step": 5422 + }, + { + "epoch": 0.91, + "learning_rate": 7.5756300984427234e-06, + "loss": 0.3691, + "step": 5423 + }, + { + "epoch": 0.91, + "learning_rate": 7.5703627927370806e-06, + "loss": 0.9043, + "step": 5424 + }, + { + "epoch": 0.91, + "learning_rate": 7.5650962033138855e-06, + "loss": 0.8911, + "step": 5425 + }, + { + "epoch": 0.91, + "learning_rate": 7.559830331725776e-06, + "loss": 0.799, + "step": 5426 + }, + { + "epoch": 0.91, + "learning_rate": 7.5545651795251906e-06, + "loss": 0.8862, + "step": 5427 + }, + { + "epoch": 0.91, + "learning_rate": 7.549300748264347e-06, + "loss": 0.8469, + "step": 5428 + }, + { + "epoch": 0.91, + "learning_rate": 7.544037039495258e-06, + 
"loss": 0.8945, + "step": 5429 + }, + { + "epoch": 0.91, + "learning_rate": 7.538774054769717e-06, + "loss": 0.9077, + "step": 5430 + }, + { + "epoch": 0.91, + "learning_rate": 7.53351179563931e-06, + "loss": 0.9388, + "step": 5431 + }, + { + "epoch": 0.91, + "learning_rate": 7.528250263655399e-06, + "loss": 0.9318, + "step": 5432 + }, + { + "epoch": 0.91, + "learning_rate": 7.52298946036915e-06, + "loss": 0.9773, + "step": 5433 + }, + { + "epoch": 0.91, + "learning_rate": 7.517729387331496e-06, + "loss": 0.8482, + "step": 5434 + }, + { + "epoch": 0.91, + "learning_rate": 7.512470046093163e-06, + "loss": 0.8508, + "step": 5435 + }, + { + "epoch": 0.91, + "learning_rate": 7.507211438204658e-06, + "loss": 0.9042, + "step": 5436 + }, + { + "epoch": 0.91, + "learning_rate": 7.501953565216276e-06, + "loss": 0.9234, + "step": 5437 + }, + { + "epoch": 0.91, + "learning_rate": 7.49669642867809e-06, + "loss": 0.8528, + "step": 5438 + }, + { + "epoch": 0.91, + "learning_rate": 7.4914400301399626e-06, + "loss": 0.8815, + "step": 5439 + }, + { + "epoch": 0.91, + "learning_rate": 7.486184371151528e-06, + "loss": 0.8212, + "step": 5440 + }, + { + "epoch": 0.91, + "learning_rate": 7.4809294532622175e-06, + "loss": 0.9278, + "step": 5441 + }, + { + "epoch": 0.91, + "learning_rate": 7.475675278021235e-06, + "loss": 0.8927, + "step": 5442 + }, + { + "epoch": 0.91, + "learning_rate": 7.47042184697756e-06, + "loss": 0.878, + "step": 5443 + }, + { + "epoch": 0.91, + "learning_rate": 7.465169161679964e-06, + "loss": 0.3249, + "step": 5444 + }, + { + "epoch": 0.91, + "learning_rate": 7.4599172236769935e-06, + "loss": 0.8862, + "step": 5445 + }, + { + "epoch": 0.91, + "learning_rate": 7.454666034516973e-06, + "loss": 0.9021, + "step": 5446 + }, + { + "epoch": 0.91, + "learning_rate": 7.449415595748008e-06, + "loss": 0.8811, + "step": 5447 + }, + { + "epoch": 0.91, + "learning_rate": 7.4441659089179815e-06, + "loss": 0.986, + "step": 5448 + }, + { + "epoch": 0.91, + "learning_rate": 7.43891697557456e-06, + "loss": 0.901, + "step": 5449 + }, + { + "epoch": 0.91, + "learning_rate": 7.433668797265181e-06, + "loss": 0.9233, + "step": 5450 + }, + { + "epoch": 0.91, + "learning_rate": 7.428421375537063e-06, + "loss": 0.8373, + "step": 5451 + }, + { + "epoch": 0.91, + "learning_rate": 7.423174711937202e-06, + "loss": 0.8997, + "step": 5452 + }, + { + "epoch": 0.91, + "learning_rate": 7.417928808012366e-06, + "loss": 0.8857, + "step": 5453 + }, + { + "epoch": 0.91, + "learning_rate": 7.412683665309106e-06, + "loss": 0.3876, + "step": 5454 + }, + { + "epoch": 0.91, + "learning_rate": 7.407439285373741e-06, + "loss": 0.3786, + "step": 5455 + }, + { + "epoch": 0.91, + "learning_rate": 7.40219566975237e-06, + "loss": 0.8611, + "step": 5456 + }, + { + "epoch": 0.91, + "learning_rate": 7.396952819990868e-06, + "loss": 0.8933, + "step": 5457 + }, + { + "epoch": 0.92, + "learning_rate": 7.391710737634884e-06, + "loss": 0.8459, + "step": 5458 + }, + { + "epoch": 0.92, + "learning_rate": 7.386469424229832e-06, + "loss": 0.9005, + "step": 5459 + }, + { + "epoch": 0.92, + "learning_rate": 7.381228881320912e-06, + "loss": 0.8206, + "step": 5460 + }, + { + "epoch": 0.92, + "learning_rate": 7.375989110453087e-06, + "loss": 0.8507, + "step": 5461 + }, + { + "epoch": 0.92, + "learning_rate": 7.370750113171099e-06, + "loss": 0.8546, + "step": 5462 + }, + { + "epoch": 0.92, + "learning_rate": 7.365511891019455e-06, + "loss": 0.8996, + "step": 5463 + }, + { + "epoch": 0.92, + "learning_rate": 7.360274445542439e-06, + "loss": 0.9057, + 
"step": 5464 + }, + { + "epoch": 0.92, + "learning_rate": 7.355037778284109e-06, + "loss": 0.8779, + "step": 5465 + }, + { + "epoch": 0.92, + "learning_rate": 7.3498018907882855e-06, + "loss": 0.8625, + "step": 5466 + }, + { + "epoch": 0.92, + "learning_rate": 7.344566784598565e-06, + "loss": 0.8653, + "step": 5467 + }, + { + "epoch": 0.92, + "learning_rate": 7.339332461258307e-06, + "loss": 0.3838, + "step": 5468 + }, + { + "epoch": 0.92, + "learning_rate": 7.334098922310651e-06, + "loss": 0.8996, + "step": 5469 + }, + { + "epoch": 0.92, + "learning_rate": 7.328866169298493e-06, + "loss": 0.8785, + "step": 5470 + }, + { + "epoch": 0.92, + "learning_rate": 7.323634203764507e-06, + "loss": 0.8689, + "step": 5471 + }, + { + "epoch": 0.92, + "learning_rate": 7.318403027251127e-06, + "loss": 0.8709, + "step": 5472 + }, + { + "epoch": 0.92, + "learning_rate": 7.313172641300565e-06, + "loss": 0.8368, + "step": 5473 + }, + { + "epoch": 0.92, + "learning_rate": 7.30794304745479e-06, + "loss": 0.8713, + "step": 5474 + }, + { + "epoch": 0.92, + "learning_rate": 7.302714247255542e-06, + "loss": 0.3768, + "step": 5475 + }, + { + "epoch": 0.92, + "learning_rate": 7.297486242244322e-06, + "loss": 0.9232, + "step": 5476 + }, + { + "epoch": 0.92, + "learning_rate": 7.292259033962408e-06, + "loss": 0.8631, + "step": 5477 + }, + { + "epoch": 0.92, + "learning_rate": 7.287032623950827e-06, + "loss": 0.8955, + "step": 5478 + }, + { + "epoch": 0.92, + "learning_rate": 7.281807013750386e-06, + "loss": 0.9064, + "step": 5479 + }, + { + "epoch": 0.92, + "learning_rate": 7.276582204901643e-06, + "loss": 0.898, + "step": 5480 + }, + { + "epoch": 0.92, + "learning_rate": 7.271358198944934e-06, + "loss": 0.8752, + "step": 5481 + }, + { + "epoch": 0.92, + "learning_rate": 7.266134997420345e-06, + "loss": 0.9178, + "step": 5482 + }, + { + "epoch": 0.92, + "learning_rate": 7.260912601867733e-06, + "loss": 0.8688, + "step": 5483 + }, + { + "epoch": 0.92, + "learning_rate": 7.255691013826714e-06, + "loss": 0.9347, + "step": 5484 + }, + { + "epoch": 0.92, + "learning_rate": 7.250470234836667e-06, + "loss": 0.8773, + "step": 5485 + }, + { + "epoch": 0.92, + "learning_rate": 7.24525026643673e-06, + "loss": 0.9112, + "step": 5486 + }, + { + "epoch": 0.92, + "learning_rate": 7.240031110165808e-06, + "loss": 0.919, + "step": 5487 + }, + { + "epoch": 0.92, + "learning_rate": 7.23481276756256e-06, + "loss": 0.8938, + "step": 5488 + }, + { + "epoch": 0.92, + "learning_rate": 7.229595240165406e-06, + "loss": 0.9383, + "step": 5489 + }, + { + "epoch": 0.92, + "learning_rate": 7.224378529512535e-06, + "loss": 0.9183, + "step": 5490 + }, + { + "epoch": 0.92, + "learning_rate": 7.219162637141881e-06, + "loss": 0.38, + "step": 5491 + }, + { + "epoch": 0.92, + "learning_rate": 7.213947564591148e-06, + "loss": 0.895, + "step": 5492 + }, + { + "epoch": 0.92, + "learning_rate": 7.208733313397791e-06, + "loss": 0.9153, + "step": 5493 + }, + { + "epoch": 0.92, + "learning_rate": 7.203519885099026e-06, + "loss": 0.8777, + "step": 5494 + }, + { + "epoch": 0.92, + "learning_rate": 7.198307281231827e-06, + "loss": 0.9345, + "step": 5495 + }, + { + "epoch": 0.92, + "learning_rate": 7.193095503332923e-06, + "loss": 0.8996, + "step": 5496 + }, + { + "epoch": 0.92, + "learning_rate": 7.187884552938799e-06, + "loss": 0.8954, + "step": 5497 + }, + { + "epoch": 0.92, + "learning_rate": 7.182674431585703e-06, + "loss": 0.8747, + "step": 5498 + }, + { + "epoch": 0.92, + "learning_rate": 7.177465140809629e-06, + "loss": 0.9026, + "step": 5499 + }, + { + 
"epoch": 0.92, + "learning_rate": 7.172256682146334e-06, + "loss": 0.8702, + "step": 5500 + }, + { + "epoch": 0.92, + "learning_rate": 7.167049057131324e-06, + "loss": 0.8609, + "step": 5501 + }, + { + "epoch": 0.92, + "learning_rate": 7.161842267299859e-06, + "loss": 0.91, + "step": 5502 + }, + { + "epoch": 0.92, + "learning_rate": 7.1566363141869576e-06, + "loss": 0.8708, + "step": 5503 + }, + { + "epoch": 0.92, + "learning_rate": 7.1514311993273884e-06, + "loss": 0.8506, + "step": 5504 + }, + { + "epoch": 0.92, + "learning_rate": 7.146226924255671e-06, + "loss": 0.8204, + "step": 5505 + }, + { + "epoch": 0.92, + "learning_rate": 7.141023490506085e-06, + "loss": 0.9374, + "step": 5506 + }, + { + "epoch": 0.92, + "learning_rate": 7.135820899612656e-06, + "loss": 0.898, + "step": 5507 + }, + { + "epoch": 0.92, + "learning_rate": 7.130619153109162e-06, + "loss": 0.8585, + "step": 5508 + }, + { + "epoch": 0.92, + "learning_rate": 7.125418252529128e-06, + "loss": 0.8635, + "step": 5509 + }, + { + "epoch": 0.92, + "learning_rate": 7.120218199405842e-06, + "loss": 0.3722, + "step": 5510 + }, + { + "epoch": 0.92, + "learning_rate": 7.115018995272326e-06, + "loss": 0.8823, + "step": 5511 + }, + { + "epoch": 0.92, + "learning_rate": 7.109820641661365e-06, + "loss": 0.892, + "step": 5512 + }, + { + "epoch": 0.92, + "learning_rate": 7.104623140105482e-06, + "loss": 0.7964, + "step": 5513 + }, + { + "epoch": 0.92, + "learning_rate": 7.09942649213696e-06, + "loss": 0.8775, + "step": 5514 + }, + { + "epoch": 0.92, + "learning_rate": 7.094230699287827e-06, + "loss": 0.8614, + "step": 5515 + }, + { + "epoch": 0.92, + "learning_rate": 7.089035763089851e-06, + "loss": 0.8348, + "step": 5516 + }, + { + "epoch": 0.92, + "learning_rate": 7.083841685074559e-06, + "loss": 0.9047, + "step": 5517 + }, + { + "epoch": 0.93, + "learning_rate": 7.078648466773215e-06, + "loss": 0.8988, + "step": 5518 + }, + { + "epoch": 0.93, + "learning_rate": 7.073456109716836e-06, + "loss": 0.4233, + "step": 5519 + }, + { + "epoch": 0.93, + "learning_rate": 7.068264615436184e-06, + "loss": 0.8203, + "step": 5520 + }, + { + "epoch": 0.93, + "learning_rate": 7.063073985461761e-06, + "loss": 0.8185, + "step": 5521 + }, + { + "epoch": 0.93, + "learning_rate": 7.057884221323825e-06, + "loss": 0.8629, + "step": 5522 + }, + { + "epoch": 0.93, + "learning_rate": 7.052695324552372e-06, + "loss": 0.8795, + "step": 5523 + }, + { + "epoch": 0.93, + "learning_rate": 7.047507296677141e-06, + "loss": 0.8788, + "step": 5524 + }, + { + "epoch": 0.93, + "learning_rate": 7.042320139227619e-06, + "loss": 0.9054, + "step": 5525 + }, + { + "epoch": 0.93, + "learning_rate": 7.037133853733029e-06, + "loss": 0.8902, + "step": 5526 + }, + { + "epoch": 0.93, + "learning_rate": 7.031948441722349e-06, + "loss": 0.8925, + "step": 5527 + }, + { + "epoch": 0.93, + "learning_rate": 7.0267639047242874e-06, + "loss": 0.8858, + "step": 5528 + }, + { + "epoch": 0.93, + "learning_rate": 7.021580244267299e-06, + "loss": 0.9061, + "step": 5529 + }, + { + "epoch": 0.93, + "learning_rate": 7.01639746187959e-06, + "loss": 0.8071, + "step": 5530 + }, + { + "epoch": 0.93, + "learning_rate": 7.0112155590890885e-06, + "loss": 0.9281, + "step": 5531 + }, + { + "epoch": 0.93, + "learning_rate": 7.006034537423481e-06, + "loss": 0.8552, + "step": 5532 + }, + { + "epoch": 0.93, + "learning_rate": 7.000854398410182e-06, + "loss": 0.8398, + "step": 5533 + }, + { + "epoch": 0.93, + "learning_rate": 6.99567514357635e-06, + "loss": 0.9137, + "step": 5534 + }, + { + "epoch": 0.93, + 
"learning_rate": 6.990496774448891e-06, + "loss": 0.8529, + "step": 5535 + }, + { + "epoch": 0.93, + "learning_rate": 6.985319292554433e-06, + "loss": 0.848, + "step": 5536 + }, + { + "epoch": 0.93, + "learning_rate": 6.9801426994193544e-06, + "loss": 0.9109, + "step": 5537 + }, + { + "epoch": 0.93, + "learning_rate": 6.974966996569774e-06, + "loss": 0.8957, + "step": 5538 + }, + { + "epoch": 0.93, + "learning_rate": 6.969792185531539e-06, + "loss": 0.8282, + "step": 5539 + }, + { + "epoch": 0.93, + "learning_rate": 6.9646182678302385e-06, + "loss": 0.8846, + "step": 5540 + }, + { + "epoch": 0.93, + "learning_rate": 6.959445244991197e-06, + "loss": 0.9494, + "step": 5541 + }, + { + "epoch": 0.93, + "learning_rate": 6.954273118539478e-06, + "loss": 0.9115, + "step": 5542 + }, + { + "epoch": 0.93, + "learning_rate": 6.949101889999874e-06, + "loss": 0.9528, + "step": 5543 + }, + { + "epoch": 0.93, + "learning_rate": 6.943931560896921e-06, + "loss": 0.8641, + "step": 5544 + }, + { + "epoch": 0.93, + "learning_rate": 6.938762132754879e-06, + "loss": 0.7888, + "step": 5545 + }, + { + "epoch": 0.93, + "learning_rate": 6.933593607097763e-06, + "loss": 0.8534, + "step": 5546 + }, + { + "epoch": 0.93, + "learning_rate": 6.928425985449296e-06, + "loss": 0.949, + "step": 5547 + }, + { + "epoch": 0.93, + "learning_rate": 6.923259269332956e-06, + "loss": 0.9083, + "step": 5548 + }, + { + "epoch": 0.93, + "learning_rate": 6.918093460271938e-06, + "loss": 0.881, + "step": 5549 + }, + { + "epoch": 0.93, + "learning_rate": 6.912928559789178e-06, + "loss": 0.8975, + "step": 5550 + }, + { + "epoch": 0.93, + "learning_rate": 6.907764569407345e-06, + "loss": 0.8693, + "step": 5551 + }, + { + "epoch": 0.93, + "learning_rate": 6.902601490648836e-06, + "loss": 0.8831, + "step": 5552 + }, + { + "epoch": 0.93, + "learning_rate": 6.897439325035778e-06, + "loss": 0.889, + "step": 5553 + }, + { + "epoch": 0.93, + "learning_rate": 6.892278074090033e-06, + "loss": 0.9394, + "step": 5554 + }, + { + "epoch": 0.93, + "learning_rate": 6.887117739333195e-06, + "loss": 0.9175, + "step": 5555 + }, + { + "epoch": 0.93, + "learning_rate": 6.881958322286582e-06, + "loss": 0.8472, + "step": 5556 + }, + { + "epoch": 0.93, + "learning_rate": 6.876799824471246e-06, + "loss": 0.874, + "step": 5557 + }, + { + "epoch": 0.93, + "learning_rate": 6.871642247407962e-06, + "loss": 0.8612, + "step": 5558 + }, + { + "epoch": 0.93, + "learning_rate": 6.86648559261724e-06, + "loss": 0.8862, + "step": 5559 + }, + { + "epoch": 0.93, + "learning_rate": 6.861329861619317e-06, + "loss": 0.8684, + "step": 5560 + }, + { + "epoch": 0.93, + "learning_rate": 6.856175055934153e-06, + "loss": 0.8532, + "step": 5561 + }, + { + "epoch": 0.93, + "learning_rate": 6.851021177081441e-06, + "loss": 0.8552, + "step": 5562 + }, + { + "epoch": 0.93, + "learning_rate": 6.845868226580598e-06, + "loss": 0.8596, + "step": 5563 + }, + { + "epoch": 0.93, + "learning_rate": 6.840716205950768e-06, + "loss": 0.8747, + "step": 5564 + }, + { + "epoch": 0.93, + "learning_rate": 6.835565116710823e-06, + "loss": 0.8892, + "step": 5565 + }, + { + "epoch": 0.93, + "learning_rate": 6.830414960379352e-06, + "loss": 0.8655, + "step": 5566 + }, + { + "epoch": 0.93, + "learning_rate": 6.82526573847468e-06, + "loss": 0.8699, + "step": 5567 + }, + { + "epoch": 0.93, + "learning_rate": 6.820117452514847e-06, + "loss": 0.9181, + "step": 5568 + }, + { + "epoch": 0.93, + "learning_rate": 6.8149701040176275e-06, + "loss": 0.9031, + "step": 5569 + }, + { + "epoch": 0.93, + "learning_rate": 
6.809823694500504e-06, + "loss": 0.8947, + "step": 5570 + }, + { + "epoch": 0.93, + "learning_rate": 6.804678225480702e-06, + "loss": 0.841, + "step": 5571 + }, + { + "epoch": 0.93, + "learning_rate": 6.799533698475155e-06, + "loss": 0.8419, + "step": 5572 + }, + { + "epoch": 0.93, + "learning_rate": 6.7943901150005255e-06, + "loss": 0.842, + "step": 5573 + }, + { + "epoch": 0.93, + "learning_rate": 6.789247476573191e-06, + "loss": 0.8918, + "step": 5574 + }, + { + "epoch": 0.93, + "learning_rate": 6.784105784709262e-06, + "loss": 0.8292, + "step": 5575 + }, + { + "epoch": 0.93, + "learning_rate": 6.778965040924555e-06, + "loss": 0.8278, + "step": 5576 + }, + { + "epoch": 0.93, + "learning_rate": 6.773825246734622e-06, + "loss": 0.8463, + "step": 5577 + }, + { + "epoch": 0.94, + "learning_rate": 6.768686403654721e-06, + "loss": 0.8371, + "step": 5578 + }, + { + "epoch": 0.94, + "learning_rate": 6.763548513199842e-06, + "loss": 0.3541, + "step": 5579 + }, + { + "epoch": 0.94, + "learning_rate": 6.758411576884691e-06, + "loss": 0.8576, + "step": 5580 + }, + { + "epoch": 0.94, + "learning_rate": 6.7532755962236855e-06, + "loss": 0.3684, + "step": 5581 + }, + { + "epoch": 0.94, + "learning_rate": 6.748140572730968e-06, + "loss": 0.8637, + "step": 5582 + }, + { + "epoch": 0.94, + "learning_rate": 6.743006507920396e-06, + "loss": 0.8736, + "step": 5583 + }, + { + "epoch": 0.94, + "learning_rate": 6.737873403305548e-06, + "loss": 0.8683, + "step": 5584 + }, + { + "epoch": 0.94, + "learning_rate": 6.732741260399713e-06, + "loss": 0.8836, + "step": 5585 + }, + { + "epoch": 0.94, + "learning_rate": 6.7276100807159e-06, + "loss": 0.9106, + "step": 5586 + }, + { + "epoch": 0.94, + "learning_rate": 6.72247986576684e-06, + "loss": 0.8502, + "step": 5587 + }, + { + "epoch": 0.94, + "learning_rate": 6.717350617064973e-06, + "loss": 0.8449, + "step": 5588 + }, + { + "epoch": 0.94, + "learning_rate": 6.712222336122452e-06, + "loss": 0.904, + "step": 5589 + }, + { + "epoch": 0.94, + "learning_rate": 6.707095024451149e-06, + "loss": 0.8754, + "step": 5590 + }, + { + "epoch": 0.94, + "learning_rate": 6.701968683562649e-06, + "loss": 0.9063, + "step": 5591 + }, + { + "epoch": 0.94, + "learning_rate": 6.696843314968251e-06, + "loss": 0.9085, + "step": 5592 + }, + { + "epoch": 0.94, + "learning_rate": 6.691718920178967e-06, + "loss": 0.8555, + "step": 5593 + }, + { + "epoch": 0.94, + "learning_rate": 6.68659550070552e-06, + "loss": 0.4048, + "step": 5594 + }, + { + "epoch": 0.94, + "learning_rate": 6.681473058058352e-06, + "loss": 0.9135, + "step": 5595 + }, + { + "epoch": 0.94, + "learning_rate": 6.676351593747611e-06, + "loss": 0.8987, + "step": 5596 + }, + { + "epoch": 0.94, + "learning_rate": 6.671231109283156e-06, + "loss": 0.9664, + "step": 5597 + }, + { + "epoch": 0.94, + "learning_rate": 6.666111606174564e-06, + "loss": 0.883, + "step": 5598 + }, + { + "epoch": 0.94, + "learning_rate": 6.660993085931113e-06, + "loss": 0.9381, + "step": 5599 + }, + { + "epoch": 0.94, + "learning_rate": 6.6558755500618e-06, + "loss": 0.8672, + "step": 5600 + }, + { + "epoch": 0.94, + "learning_rate": 6.650759000075324e-06, + "loss": 0.9144, + "step": 5601 + }, + { + "epoch": 0.94, + "learning_rate": 6.6456434374800986e-06, + "loss": 0.8572, + "step": 5602 + }, + { + "epoch": 0.94, + "learning_rate": 6.64052886378425e-06, + "loss": 0.8568, + "step": 5603 + }, + { + "epoch": 0.94, + "learning_rate": 6.635415280495603e-06, + "loss": 0.9471, + "step": 5604 + }, + { + "epoch": 0.94, + "learning_rate": 6.630302689121698e-06, + 
"loss": 0.9191, + "step": 5605 + }, + { + "epoch": 0.94, + "learning_rate": 6.625191091169778e-06, + "loss": 0.8212, + "step": 5606 + }, + { + "epoch": 0.94, + "learning_rate": 6.6200804881467975e-06, + "loss": 0.8919, + "step": 5607 + }, + { + "epoch": 0.94, + "learning_rate": 6.614970881559413e-06, + "loss": 0.8477, + "step": 5608 + }, + { + "epoch": 0.94, + "learning_rate": 6.609862272913994e-06, + "loss": 0.9232, + "step": 5609 + }, + { + "epoch": 0.94, + "learning_rate": 6.604754663716604e-06, + "loss": 0.8577, + "step": 5610 + }, + { + "epoch": 0.94, + "learning_rate": 6.599648055473029e-06, + "loss": 0.3778, + "step": 5611 + }, + { + "epoch": 0.94, + "learning_rate": 6.5945424496887465e-06, + "loss": 0.8933, + "step": 5612 + }, + { + "epoch": 0.94, + "learning_rate": 6.5894378478689446e-06, + "loss": 0.8134, + "step": 5613 + }, + { + "epoch": 0.94, + "learning_rate": 6.5843342515185075e-06, + "loss": 0.8729, + "step": 5614 + }, + { + "epoch": 0.94, + "learning_rate": 6.579231662142036e-06, + "loss": 0.9439, + "step": 5615 + }, + { + "epoch": 0.94, + "learning_rate": 6.574130081243821e-06, + "loss": 0.8484, + "step": 5616 + }, + { + "epoch": 0.94, + "learning_rate": 6.569029510327864e-06, + "loss": 0.8945, + "step": 5617 + }, + { + "epoch": 0.94, + "learning_rate": 6.563929950897867e-06, + "loss": 0.8616, + "step": 5618 + }, + { + "epoch": 0.94, + "learning_rate": 6.558831404457229e-06, + "loss": 0.8758, + "step": 5619 + }, + { + "epoch": 0.94, + "learning_rate": 6.553733872509064e-06, + "loss": 0.871, + "step": 5620 + }, + { + "epoch": 0.94, + "learning_rate": 6.548637356556171e-06, + "loss": 0.9101, + "step": 5621 + }, + { + "epoch": 0.94, + "learning_rate": 6.543541858101057e-06, + "loss": 0.9335, + "step": 5622 + }, + { + "epoch": 0.94, + "learning_rate": 6.53844737864593e-06, + "loss": 0.8451, + "step": 5623 + }, + { + "epoch": 0.94, + "learning_rate": 6.533353919692693e-06, + "loss": 0.9009, + "step": 5624 + }, + { + "epoch": 0.94, + "learning_rate": 6.528261482742956e-06, + "loss": 0.9062, + "step": 5625 + }, + { + "epoch": 0.94, + "learning_rate": 6.523170069298017e-06, + "loss": 0.8976, + "step": 5626 + }, + { + "epoch": 0.94, + "learning_rate": 6.518079680858877e-06, + "loss": 0.9505, + "step": 5627 + }, + { + "epoch": 0.94, + "learning_rate": 6.512990318926243e-06, + "loss": 0.8679, + "step": 5628 + }, + { + "epoch": 0.94, + "learning_rate": 6.507901985000506e-06, + "loss": 0.9139, + "step": 5629 + }, + { + "epoch": 0.94, + "learning_rate": 6.502814680581765e-06, + "loss": 0.906, + "step": 5630 + }, + { + "epoch": 0.94, + "learning_rate": 6.497728407169805e-06, + "loss": 0.8236, + "step": 5631 + }, + { + "epoch": 0.94, + "learning_rate": 6.492643166264116e-06, + "loss": 0.8794, + "step": 5632 + }, + { + "epoch": 0.94, + "learning_rate": 6.487558959363879e-06, + "loss": 0.8814, + "step": 5633 + }, + { + "epoch": 0.94, + "learning_rate": 6.482475787967972e-06, + "loss": 0.843, + "step": 5634 + }, + { + "epoch": 0.94, + "learning_rate": 6.4773936535749615e-06, + "loss": 0.8814, + "step": 5635 + }, + { + "epoch": 0.94, + "learning_rate": 6.472312557683125e-06, + "loss": 0.9117, + "step": 5636 + }, + { + "epoch": 0.95, + "learning_rate": 6.467232501790414e-06, + "loss": 0.8266, + "step": 5637 + }, + { + "epoch": 0.95, + "learning_rate": 6.462153487394487e-06, + "loss": 0.3364, + "step": 5638 + }, + { + "epoch": 0.95, + "learning_rate": 6.457075515992686e-06, + "loss": 0.8903, + "step": 5639 + }, + { + "epoch": 0.95, + "learning_rate": 6.451998589082054e-06, + "loss": 0.9185, + 
"step": 5640 + }, + { + "epoch": 0.95, + "learning_rate": 6.446922708159319e-06, + "loss": 0.8701, + "step": 5641 + }, + { + "epoch": 0.95, + "learning_rate": 6.441847874720907e-06, + "loss": 0.8712, + "step": 5642 + }, + { + "epoch": 0.95, + "learning_rate": 6.436774090262925e-06, + "loss": 0.8352, + "step": 5643 + }, + { + "epoch": 0.95, + "learning_rate": 6.431701356281186e-06, + "loss": 0.9118, + "step": 5644 + }, + { + "epoch": 0.95, + "learning_rate": 6.426629674271183e-06, + "loss": 0.9322, + "step": 5645 + }, + { + "epoch": 0.95, + "learning_rate": 6.4215590457280986e-06, + "loss": 0.8492, + "step": 5646 + }, + { + "epoch": 0.95, + "learning_rate": 6.41648947214681e-06, + "loss": 0.8297, + "step": 5647 + }, + { + "epoch": 0.95, + "learning_rate": 6.4114209550218755e-06, + "loss": 0.8626, + "step": 5648 + }, + { + "epoch": 0.95, + "learning_rate": 6.40635349584755e-06, + "loss": 0.9214, + "step": 5649 + }, + { + "epoch": 0.95, + "learning_rate": 6.401287096117776e-06, + "loss": 0.8538, + "step": 5650 + }, + { + "epoch": 0.95, + "learning_rate": 6.396221757326175e-06, + "loss": 0.8495, + "step": 5651 + }, + { + "epoch": 0.95, + "learning_rate": 6.391157480966066e-06, + "loss": 0.9119, + "step": 5652 + }, + { + "epoch": 0.95, + "learning_rate": 6.386094268530454e-06, + "loss": 0.8461, + "step": 5653 + }, + { + "epoch": 0.95, + "learning_rate": 6.381032121512018e-06, + "loss": 0.9249, + "step": 5654 + }, + { + "epoch": 0.95, + "learning_rate": 6.375971041403142e-06, + "loss": 0.8807, + "step": 5655 + }, + { + "epoch": 0.95, + "learning_rate": 6.370911029695877e-06, + "loss": 0.8934, + "step": 5656 + }, + { + "epoch": 0.95, + "learning_rate": 6.36585208788197e-06, + "loss": 0.8581, + "step": 5657 + }, + { + "epoch": 0.95, + "learning_rate": 6.36079421745285e-06, + "loss": 0.8184, + "step": 5658 + }, + { + "epoch": 0.95, + "learning_rate": 6.355737419899628e-06, + "loss": 0.8845, + "step": 5659 + }, + { + "epoch": 0.95, + "learning_rate": 6.350681696713106e-06, + "loss": 0.8886, + "step": 5660 + }, + { + "epoch": 0.95, + "learning_rate": 6.345627049383763e-06, + "loss": 0.8638, + "step": 5661 + }, + { + "epoch": 0.95, + "learning_rate": 6.340573479401757e-06, + "loss": 0.8571, + "step": 5662 + }, + { + "epoch": 0.95, + "learning_rate": 6.33552098825694e-06, + "loss": 0.9168, + "step": 5663 + }, + { + "epoch": 0.95, + "learning_rate": 6.3304695774388315e-06, + "loss": 0.888, + "step": 5664 + }, + { + "epoch": 0.95, + "learning_rate": 6.325419248436648e-06, + "loss": 0.8817, + "step": 5665 + }, + { + "epoch": 0.95, + "learning_rate": 6.3203700027392725e-06, + "loss": 0.8578, + "step": 5666 + }, + { + "epoch": 0.95, + "learning_rate": 6.3153218418352755e-06, + "loss": 0.8626, + "step": 5667 + }, + { + "epoch": 0.95, + "learning_rate": 6.310274767212916e-06, + "loss": 0.8789, + "step": 5668 + }, + { + "epoch": 0.95, + "learning_rate": 6.305228780360115e-06, + "loss": 0.8433, + "step": 5669 + }, + { + "epoch": 0.95, + "learning_rate": 6.3001838827644905e-06, + "loss": 0.8574, + "step": 5670 + }, + { + "epoch": 0.95, + "learning_rate": 6.295140075913323e-06, + "loss": 0.8267, + "step": 5671 + }, + { + "epoch": 0.95, + "learning_rate": 6.290097361293585e-06, + "loss": 0.8204, + "step": 5672 + }, + { + "epoch": 0.95, + "learning_rate": 6.285055740391916e-06, + "loss": 0.8596, + "step": 5673 + }, + { + "epoch": 0.95, + "learning_rate": 6.2800152146946445e-06, + "loss": 0.8935, + "step": 5674 + }, + { + "epoch": 0.95, + "learning_rate": 6.274975785687764e-06, + "loss": 0.7979, + "step": 5675 + }, 
+ { + "epoch": 0.95, + "learning_rate": 6.269937454856961e-06, + "loss": 0.9406, + "step": 5676 + }, + { + "epoch": 0.95, + "learning_rate": 6.264900223687577e-06, + "loss": 0.8551, + "step": 5677 + }, + { + "epoch": 0.95, + "learning_rate": 6.259864093664647e-06, + "loss": 0.8248, + "step": 5678 + }, + { + "epoch": 0.95, + "learning_rate": 6.254829066272872e-06, + "loss": 0.9155, + "step": 5679 + }, + { + "epoch": 0.95, + "learning_rate": 6.249795142996633e-06, + "loss": 0.8695, + "step": 5680 + }, + { + "epoch": 0.95, + "learning_rate": 6.24476232531998e-06, + "loss": 0.8848, + "step": 5681 + }, + { + "epoch": 0.95, + "learning_rate": 6.2397306147266425e-06, + "loss": 0.9263, + "step": 5682 + }, + { + "epoch": 0.95, + "learning_rate": 6.2347000127000205e-06, + "loss": 0.9325, + "step": 5683 + }, + { + "epoch": 0.95, + "learning_rate": 6.229670520723185e-06, + "loss": 0.9082, + "step": 5684 + }, + { + "epoch": 0.95, + "learning_rate": 6.22464214027889e-06, + "loss": 0.8185, + "step": 5685 + }, + { + "epoch": 0.95, + "learning_rate": 6.21961487284955e-06, + "loss": 0.3591, + "step": 5686 + }, + { + "epoch": 0.95, + "learning_rate": 6.214588719917256e-06, + "loss": 0.8922, + "step": 5687 + }, + { + "epoch": 0.95, + "learning_rate": 6.209563682963773e-06, + "loss": 0.8567, + "step": 5688 + }, + { + "epoch": 0.95, + "learning_rate": 6.204539763470531e-06, + "loss": 0.8697, + "step": 5689 + }, + { + "epoch": 0.95, + "learning_rate": 6.199516962918637e-06, + "loss": 0.8875, + "step": 5690 + }, + { + "epoch": 0.95, + "learning_rate": 6.1944952827888615e-06, + "loss": 0.8642, + "step": 5691 + }, + { + "epoch": 0.95, + "learning_rate": 6.189474724561648e-06, + "loss": 0.8089, + "step": 5692 + }, + { + "epoch": 0.95, + "learning_rate": 6.184455289717117e-06, + "loss": 0.9201, + "step": 5693 + }, + { + "epoch": 0.95, + "learning_rate": 6.1794369797350426e-06, + "loss": 0.878, + "step": 5694 + }, + { + "epoch": 0.95, + "learning_rate": 6.17441979609488e-06, + "loss": 0.9034, + "step": 5695 + }, + { + "epoch": 0.95, + "learning_rate": 6.169403740275743e-06, + "loss": 0.8409, + "step": 5696 + }, + { + "epoch": 0.96, + "learning_rate": 6.164388813756421e-06, + "loss": 0.8317, + "step": 5697 + }, + { + "epoch": 0.96, + "learning_rate": 6.159375018015365e-06, + "loss": 0.898, + "step": 5698 + }, + { + "epoch": 0.96, + "learning_rate": 6.1543623545306944e-06, + "loss": 0.8555, + "step": 5699 + }, + { + "epoch": 0.96, + "learning_rate": 6.149350824780189e-06, + "loss": 0.9101, + "step": 5700 + }, + { + "epoch": 0.96, + "learning_rate": 6.144340430241313e-06, + "loss": 0.8609, + "step": 5701 + }, + { + "epoch": 0.96, + "learning_rate": 6.139331172391174e-06, + "loss": 0.8265, + "step": 5702 + }, + { + "epoch": 0.96, + "learning_rate": 6.134323052706557e-06, + "loss": 0.8706, + "step": 5703 + }, + { + "epoch": 0.96, + "learning_rate": 6.129316072663906e-06, + "loss": 0.3208, + "step": 5704 + }, + { + "epoch": 0.96, + "learning_rate": 6.124310233739332e-06, + "loss": 0.873, + "step": 5705 + }, + { + "epoch": 0.96, + "learning_rate": 6.119305537408609e-06, + "loss": 0.8561, + "step": 5706 + }, + { + "epoch": 0.96, + "learning_rate": 6.114301985147173e-06, + "loss": 0.8416, + "step": 5707 + }, + { + "epoch": 0.96, + "learning_rate": 6.109299578430119e-06, + "loss": 0.8716, + "step": 5708 + }, + { + "epoch": 0.96, + "learning_rate": 6.104298318732218e-06, + "loss": 0.8688, + "step": 5709 + }, + { + "epoch": 0.96, + "learning_rate": 6.099298207527888e-06, + "loss": 0.9412, + "step": 5710 + }, + { + "epoch": 0.96, 
+ "learning_rate": 6.094299246291215e-06, + "loss": 0.9396, + "step": 5711 + }, + { + "epoch": 0.96, + "learning_rate": 6.089301436495947e-06, + "loss": 0.8926, + "step": 5712 + }, + { + "epoch": 0.96, + "learning_rate": 6.084304779615489e-06, + "loss": 0.8932, + "step": 5713 + }, + { + "epoch": 0.96, + "learning_rate": 6.0793092771229045e-06, + "loss": 0.8568, + "step": 5714 + }, + { + "epoch": 0.96, + "learning_rate": 6.074314930490925e-06, + "loss": 0.8409, + "step": 5715 + }, + { + "epoch": 0.96, + "learning_rate": 6.0693217411919305e-06, + "loss": 0.8984, + "step": 5716 + }, + { + "epoch": 0.96, + "learning_rate": 6.0643297106979705e-06, + "loss": 0.9165, + "step": 5717 + }, + { + "epoch": 0.96, + "learning_rate": 6.059338840480749e-06, + "loss": 0.8358, + "step": 5718 + }, + { + "epoch": 0.96, + "learning_rate": 6.0543491320116185e-06, + "loss": 0.3746, + "step": 5719 + }, + { + "epoch": 0.96, + "learning_rate": 6.049360586761608e-06, + "loss": 0.8449, + "step": 5720 + }, + { + "epoch": 0.96, + "learning_rate": 6.0443732062013835e-06, + "loss": 0.8981, + "step": 5721 + }, + { + "epoch": 0.96, + "learning_rate": 6.039386991801282e-06, + "loss": 0.8083, + "step": 5722 + }, + { + "epoch": 0.96, + "learning_rate": 6.034401945031289e-06, + "loss": 0.3509, + "step": 5723 + }, + { + "epoch": 0.96, + "learning_rate": 6.0294180673610485e-06, + "loss": 0.8659, + "step": 5724 + }, + { + "epoch": 0.96, + "learning_rate": 6.024435360259862e-06, + "loss": 0.8617, + "step": 5725 + }, + { + "epoch": 0.96, + "learning_rate": 6.019453825196684e-06, + "loss": 0.9579, + "step": 5726 + }, + { + "epoch": 0.96, + "learning_rate": 6.01447346364012e-06, + "loss": 0.8819, + "step": 5727 + }, + { + "epoch": 0.96, + "learning_rate": 6.009494277058436e-06, + "loss": 0.9217, + "step": 5728 + }, + { + "epoch": 0.96, + "learning_rate": 6.004516266919546e-06, + "loss": 0.8733, + "step": 5729 + }, + { + "epoch": 0.96, + "learning_rate": 5.999539434691019e-06, + "loss": 0.8777, + "step": 5730 + }, + { + "epoch": 0.96, + "learning_rate": 5.99456378184008e-06, + "loss": 0.9133, + "step": 5731 + }, + { + "epoch": 0.96, + "learning_rate": 5.989589309833596e-06, + "loss": 0.8446, + "step": 5732 + }, + { + "epoch": 0.96, + "learning_rate": 5.984616020138103e-06, + "loss": 0.8915, + "step": 5733 + }, + { + "epoch": 0.96, + "learning_rate": 5.979643914219775e-06, + "loss": 0.9103, + "step": 5734 + }, + { + "epoch": 0.96, + "learning_rate": 5.974672993544439e-06, + "loss": 0.8681, + "step": 5735 + }, + { + "epoch": 0.96, + "learning_rate": 5.969703259577577e-06, + "loss": 0.8791, + "step": 5736 + }, + { + "epoch": 0.96, + "learning_rate": 5.964734713784315e-06, + "loss": 0.8668, + "step": 5737 + }, + { + "epoch": 0.96, + "learning_rate": 5.959767357629438e-06, + "loss": 0.8951, + "step": 5738 + }, + { + "epoch": 0.96, + "learning_rate": 5.954801192577368e-06, + "loss": 0.9333, + "step": 5739 + }, + { + "epoch": 0.96, + "learning_rate": 5.949836220092184e-06, + "loss": 0.8899, + "step": 5740 + }, + { + "epoch": 0.96, + "learning_rate": 5.944872441637617e-06, + "loss": 0.3802, + "step": 5741 + }, + { + "epoch": 0.96, + "learning_rate": 5.939909858677036e-06, + "loss": 0.8581, + "step": 5742 + }, + { + "epoch": 0.96, + "learning_rate": 5.9349484726734625e-06, + "loss": 0.8793, + "step": 5743 + }, + { + "epoch": 0.96, + "learning_rate": 5.929988285089566e-06, + "loss": 0.8484, + "step": 5744 + }, + { + "epoch": 0.96, + "learning_rate": 5.925029297387663e-06, + "loss": 0.919, + "step": 5745 + }, + { + "epoch": 0.96, + 
"learning_rate": 5.9200715110297105e-06, + "loss": 0.8323, + "step": 5746 + }, + { + "epoch": 0.96, + "learning_rate": 5.915114927477321e-06, + "loss": 0.8099, + "step": 5747 + }, + { + "epoch": 0.96, + "learning_rate": 5.910159548191743e-06, + "loss": 0.9028, + "step": 5748 + }, + { + "epoch": 0.96, + "learning_rate": 5.905205374633874e-06, + "loss": 0.8664, + "step": 5749 + }, + { + "epoch": 0.96, + "learning_rate": 5.900252408264258e-06, + "loss": 0.8611, + "step": 5750 + }, + { + "epoch": 0.96, + "learning_rate": 5.895300650543085e-06, + "loss": 0.8454, + "step": 5751 + }, + { + "epoch": 0.96, + "learning_rate": 5.890350102930178e-06, + "loss": 0.8532, + "step": 5752 + }, + { + "epoch": 0.96, + "learning_rate": 5.885400766885016e-06, + "loss": 0.8994, + "step": 5753 + }, + { + "epoch": 0.96, + "learning_rate": 5.88045264386671e-06, + "loss": 0.9772, + "step": 5754 + }, + { + "epoch": 0.96, + "learning_rate": 5.8755057353340216e-06, + "loss": 0.9019, + "step": 5755 + }, + { + "epoch": 0.96, + "learning_rate": 5.87056004274535e-06, + "loss": 0.866, + "step": 5756 + }, + { + "epoch": 0.97, + "learning_rate": 5.865615567558733e-06, + "loss": 0.3543, + "step": 5757 + }, + { + "epoch": 0.97, + "learning_rate": 5.860672311231863e-06, + "loss": 0.8466, + "step": 5758 + }, + { + "epoch": 0.97, + "learning_rate": 5.855730275222057e-06, + "loss": 0.9183, + "step": 5759 + }, + { + "epoch": 0.97, + "learning_rate": 5.850789460986281e-06, + "loss": 0.9272, + "step": 5760 + }, + { + "epoch": 0.97, + "learning_rate": 5.845849869981137e-06, + "loss": 0.857, + "step": 5761 + }, + { + "epoch": 0.97, + "learning_rate": 5.84091150366287e-06, + "loss": 0.8719, + "step": 5762 + }, + { + "epoch": 0.97, + "learning_rate": 5.83597436348736e-06, + "loss": 0.8862, + "step": 5763 + }, + { + "epoch": 0.97, + "learning_rate": 5.83103845091013e-06, + "loss": 0.8729, + "step": 5764 + }, + { + "epoch": 0.97, + "learning_rate": 5.826103767386333e-06, + "loss": 0.8607, + "step": 5765 + }, + { + "epoch": 0.97, + "learning_rate": 5.82117031437077e-06, + "loss": 0.8802, + "step": 5766 + }, + { + "epoch": 0.97, + "learning_rate": 5.816238093317878e-06, + "loss": 0.8898, + "step": 5767 + }, + { + "epoch": 0.97, + "learning_rate": 5.811307105681723e-06, + "loss": 0.8294, + "step": 5768 + }, + { + "epoch": 0.97, + "learning_rate": 5.806377352916012e-06, + "loss": 0.8447, + "step": 5769 + }, + { + "epoch": 0.97, + "learning_rate": 5.801448836474084e-06, + "loss": 0.8393, + "step": 5770 + }, + { + "epoch": 0.97, + "learning_rate": 5.796521557808923e-06, + "loss": 0.9062, + "step": 5771 + }, + { + "epoch": 0.97, + "learning_rate": 5.791595518373141e-06, + "loss": 0.8578, + "step": 5772 + }, + { + "epoch": 0.97, + "learning_rate": 5.786670719618982e-06, + "loss": 0.8511, + "step": 5773 + }, + { + "epoch": 0.97, + "learning_rate": 5.781747162998335e-06, + "loss": 0.9021, + "step": 5774 + }, + { + "epoch": 0.97, + "learning_rate": 5.776824849962706e-06, + "loss": 0.8987, + "step": 5775 + }, + { + "epoch": 0.97, + "learning_rate": 5.771903781963255e-06, + "loss": 0.8909, + "step": 5776 + }, + { + "epoch": 0.97, + "learning_rate": 5.766983960450761e-06, + "loss": 0.8616, + "step": 5777 + }, + { + "epoch": 0.97, + "learning_rate": 5.762065386875633e-06, + "loss": 0.8854, + "step": 5778 + }, + { + "epoch": 0.97, + "learning_rate": 5.757148062687925e-06, + "loss": 0.8731, + "step": 5779 + }, + { + "epoch": 0.97, + "learning_rate": 5.7522319893373136e-06, + "loss": 0.8896, + "step": 5780 + }, + { + "epoch": 0.97, + "learning_rate": 
5.747317168273103e-06, + "loss": 0.9134, + "step": 5781 + }, + { + "epoch": 0.97, + "learning_rate": 5.742403600944244e-06, + "loss": 0.8365, + "step": 5782 + }, + { + "epoch": 0.97, + "learning_rate": 5.737491288799299e-06, + "loss": 0.9008, + "step": 5783 + }, + { + "epoch": 0.97, + "learning_rate": 5.732580233286476e-06, + "loss": 0.8787, + "step": 5784 + }, + { + "epoch": 0.97, + "learning_rate": 5.727670435853601e-06, + "loss": 0.8701, + "step": 5785 + }, + { + "epoch": 0.97, + "learning_rate": 5.722761897948133e-06, + "loss": 0.9042, + "step": 5786 + }, + { + "epoch": 0.97, + "learning_rate": 5.7178546210171596e-06, + "loss": 0.892, + "step": 5787 + }, + { + "epoch": 0.97, + "learning_rate": 5.712948606507403e-06, + "loss": 0.8437, + "step": 5788 + }, + { + "epoch": 0.97, + "learning_rate": 5.7080438558652e-06, + "loss": 0.8394, + "step": 5789 + }, + { + "epoch": 0.97, + "learning_rate": 5.703140370536529e-06, + "loss": 0.8392, + "step": 5790 + }, + { + "epoch": 0.97, + "learning_rate": 5.698238151966983e-06, + "loss": 0.8511, + "step": 5791 + }, + { + "epoch": 0.97, + "learning_rate": 5.693337201601794e-06, + "loss": 0.8301, + "step": 5792 + }, + { + "epoch": 0.97, + "learning_rate": 5.68843752088581e-06, + "loss": 0.9452, + "step": 5793 + }, + { + "epoch": 0.97, + "learning_rate": 5.683539111263508e-06, + "loss": 0.9207, + "step": 5794 + }, + { + "epoch": 0.97, + "learning_rate": 5.678641974178984e-06, + "loss": 0.886, + "step": 5795 + }, + { + "epoch": 0.97, + "learning_rate": 5.673746111075978e-06, + "loss": 0.7983, + "step": 5796 + }, + { + "epoch": 0.97, + "learning_rate": 5.668851523397829e-06, + "loss": 0.9028, + "step": 5797 + }, + { + "epoch": 0.97, + "learning_rate": 5.663958212587524e-06, + "loss": 0.792, + "step": 5798 + }, + { + "epoch": 0.97, + "learning_rate": 5.6590661800876555e-06, + "loss": 0.3674, + "step": 5799 + }, + { + "epoch": 0.97, + "learning_rate": 5.654175427340444e-06, + "loss": 0.9345, + "step": 5800 + }, + { + "epoch": 0.97, + "learning_rate": 5.64928595578774e-06, + "loss": 0.7977, + "step": 5801 + }, + { + "epoch": 0.97, + "learning_rate": 5.64439776687101e-06, + "loss": 0.9264, + "step": 5802 + }, + { + "epoch": 0.97, + "learning_rate": 5.639510862031337e-06, + "loss": 0.8669, + "step": 5803 + }, + { + "epoch": 0.97, + "learning_rate": 5.634625242709439e-06, + "loss": 0.8988, + "step": 5804 + }, + { + "epoch": 0.97, + "learning_rate": 5.629740910345642e-06, + "loss": 0.8643, + "step": 5805 + }, + { + "epoch": 0.97, + "learning_rate": 5.624857866379905e-06, + "loss": 0.8671, + "step": 5806 + }, + { + "epoch": 0.97, + "learning_rate": 5.619976112251796e-06, + "loss": 0.8378, + "step": 5807 + }, + { + "epoch": 0.97, + "learning_rate": 5.615095649400504e-06, + "loss": 0.8982, + "step": 5808 + }, + { + "epoch": 0.97, + "learning_rate": 5.610216479264847e-06, + "loss": 0.8502, + "step": 5809 + }, + { + "epoch": 0.97, + "learning_rate": 5.605338603283253e-06, + "loss": 0.8214, + "step": 5810 + }, + { + "epoch": 0.97, + "learning_rate": 5.60046202289377e-06, + "loss": 0.3585, + "step": 5811 + }, + { + "epoch": 0.97, + "learning_rate": 5.595586739534062e-06, + "loss": 0.9325, + "step": 5812 + }, + { + "epoch": 0.97, + "learning_rate": 5.590712754641418e-06, + "loss": 0.9018, + "step": 5813 + }, + { + "epoch": 0.97, + "learning_rate": 5.585840069652737e-06, + "loss": 0.9667, + "step": 5814 + }, + { + "epoch": 0.97, + "learning_rate": 5.580968686004542e-06, + "loss": 0.8401, + "step": 5815 + }, + { + "epoch": 0.98, + "learning_rate": 5.576098605132959e-06, + 
"loss": 0.8779, + "step": 5816 + }, + { + "epoch": 0.98, + "learning_rate": 5.57122982847375e-06, + "loss": 0.9214, + "step": 5817 + }, + { + "epoch": 0.98, + "learning_rate": 5.566362357462274e-06, + "loss": 0.8737, + "step": 5818 + }, + { + "epoch": 0.98, + "learning_rate": 5.561496193533516e-06, + "loss": 0.9016, + "step": 5819 + }, + { + "epoch": 0.98, + "learning_rate": 5.556631338122063e-06, + "loss": 0.9235, + "step": 5820 + }, + { + "epoch": 0.98, + "learning_rate": 5.5517677926621375e-06, + "loss": 0.8959, + "step": 5821 + }, + { + "epoch": 0.98, + "learning_rate": 5.546905558587554e-06, + "loss": 0.8982, + "step": 5822 + }, + { + "epoch": 0.98, + "learning_rate": 5.542044637331758e-06, + "loss": 0.938, + "step": 5823 + }, + { + "epoch": 0.98, + "learning_rate": 5.537185030327795e-06, + "loss": 0.8698, + "step": 5824 + }, + { + "epoch": 0.98, + "learning_rate": 5.5323267390083256e-06, + "loss": 0.8499, + "step": 5825 + }, + { + "epoch": 0.98, + "learning_rate": 5.527469764805632e-06, + "loss": 0.9063, + "step": 5826 + }, + { + "epoch": 0.98, + "learning_rate": 5.522614109151596e-06, + "loss": 0.9022, + "step": 5827 + }, + { + "epoch": 0.98, + "learning_rate": 5.517759773477714e-06, + "loss": 0.8641, + "step": 5828 + }, + { + "epoch": 0.98, + "learning_rate": 5.512906759215102e-06, + "loss": 0.8819, + "step": 5829 + }, + { + "epoch": 0.98, + "learning_rate": 5.508055067794469e-06, + "loss": 0.8996, + "step": 5830 + }, + { + "epoch": 0.98, + "learning_rate": 5.5032047006461555e-06, + "loss": 0.8468, + "step": 5831 + }, + { + "epoch": 0.98, + "learning_rate": 5.4983556592000945e-06, + "loss": 0.9117, + "step": 5832 + }, + { + "epoch": 0.98, + "learning_rate": 5.4935079448858305e-06, + "loss": 0.394, + "step": 5833 + }, + { + "epoch": 0.98, + "learning_rate": 5.488661559132529e-06, + "loss": 0.8593, + "step": 5834 + }, + { + "epoch": 0.98, + "learning_rate": 5.483816503368951e-06, + "loss": 0.8538, + "step": 5835 + }, + { + "epoch": 0.98, + "learning_rate": 5.47897277902347e-06, + "loss": 0.8619, + "step": 5836 + }, + { + "epoch": 0.98, + "learning_rate": 5.47413038752406e-06, + "loss": 0.8744, + "step": 5837 + }, + { + "epoch": 0.98, + "learning_rate": 5.469289330298315e-06, + "loss": 0.8628, + "step": 5838 + }, + { + "epoch": 0.98, + "learning_rate": 5.464449608773434e-06, + "loss": 0.8283, + "step": 5839 + }, + { + "epoch": 0.98, + "learning_rate": 5.459611224376211e-06, + "loss": 0.8895, + "step": 5840 + }, + { + "epoch": 0.98, + "learning_rate": 5.454774178533051e-06, + "loss": 0.905, + "step": 5841 + }, + { + "epoch": 0.98, + "learning_rate": 5.449938472669971e-06, + "loss": 0.908, + "step": 5842 + }, + { + "epoch": 0.98, + "learning_rate": 5.445104108212586e-06, + "loss": 0.8826, + "step": 5843 + }, + { + "epoch": 0.98, + "learning_rate": 5.440271086586115e-06, + "loss": 0.86, + "step": 5844 + }, + { + "epoch": 0.98, + "learning_rate": 5.435439409215379e-06, + "loss": 0.9313, + "step": 5845 + }, + { + "epoch": 0.98, + "learning_rate": 5.430609077524816e-06, + "loss": 0.8745, + "step": 5846 + }, + { + "epoch": 0.98, + "learning_rate": 5.425780092938455e-06, + "loss": 0.8806, + "step": 5847 + }, + { + "epoch": 0.98, + "learning_rate": 5.420952456879932e-06, + "loss": 0.7787, + "step": 5848 + }, + { + "epoch": 0.98, + "learning_rate": 5.416126170772484e-06, + "loss": 0.8613, + "step": 5849 + }, + { + "epoch": 0.98, + "learning_rate": 5.411301236038945e-06, + "loss": 0.8876, + "step": 5850 + }, + { + "epoch": 0.98, + "learning_rate": 5.406477654101765e-06, + "loss": 0.884, + 
"step": 5851 + }, + { + "epoch": 0.98, + "learning_rate": 5.401655426382984e-06, + "loss": 0.8416, + "step": 5852 + }, + { + "epoch": 0.98, + "learning_rate": 5.396834554304237e-06, + "loss": 0.86, + "step": 5853 + }, + { + "epoch": 0.98, + "learning_rate": 5.392015039286775e-06, + "loss": 0.8382, + "step": 5854 + }, + { + "epoch": 0.98, + "learning_rate": 5.387196882751445e-06, + "loss": 0.8296, + "step": 5855 + }, + { + "epoch": 0.98, + "learning_rate": 5.382380086118685e-06, + "loss": 0.867, + "step": 5856 + }, + { + "epoch": 0.98, + "learning_rate": 5.3775646508085364e-06, + "loss": 0.8876, + "step": 5857 + }, + { + "epoch": 0.98, + "learning_rate": 5.3727505782406375e-06, + "loss": 0.8506, + "step": 5858 + }, + { + "epoch": 0.98, + "learning_rate": 5.367937869834234e-06, + "loss": 0.8279, + "step": 5859 + }, + { + "epoch": 0.98, + "learning_rate": 5.36312652700816e-06, + "loss": 0.832, + "step": 5860 + }, + { + "epoch": 0.98, + "learning_rate": 5.358316551180849e-06, + "loss": 0.8877, + "step": 5861 + }, + { + "epoch": 0.98, + "learning_rate": 5.3535079437703275e-06, + "loss": 0.9017, + "step": 5862 + }, + { + "epoch": 0.98, + "learning_rate": 5.348700706194227e-06, + "loss": 0.8615, + "step": 5863 + }, + { + "epoch": 0.98, + "learning_rate": 5.343894839869777e-06, + "loss": 0.8507, + "step": 5864 + }, + { + "epoch": 0.98, + "learning_rate": 5.339090346213791e-06, + "loss": 0.9372, + "step": 5865 + }, + { + "epoch": 0.98, + "learning_rate": 5.334287226642683e-06, + "loss": 0.8798, + "step": 5866 + }, + { + "epoch": 0.98, + "learning_rate": 5.329485482572467e-06, + "loss": 0.8324, + "step": 5867 + }, + { + "epoch": 0.98, + "learning_rate": 5.324685115418746e-06, + "loss": 0.8483, + "step": 5868 + }, + { + "epoch": 0.98, + "learning_rate": 5.319886126596717e-06, + "loss": 0.8633, + "step": 5869 + }, + { + "epoch": 0.98, + "learning_rate": 5.31508851752117e-06, + "loss": 0.9368, + "step": 5870 + }, + { + "epoch": 0.98, + "learning_rate": 5.310292289606491e-06, + "loss": 0.8587, + "step": 5871 + }, + { + "epoch": 0.98, + "learning_rate": 5.305497444266666e-06, + "loss": 0.8539, + "step": 5872 + }, + { + "epoch": 0.98, + "learning_rate": 5.30070398291526e-06, + "loss": 0.8668, + "step": 5873 + }, + { + "epoch": 0.98, + "learning_rate": 5.295911906965434e-06, + "loss": 0.8342, + "step": 5874 + }, + { + "epoch": 0.98, + "learning_rate": 5.29112121782994e-06, + "loss": 0.8557, + "step": 5875 + }, + { + "epoch": 0.99, + "learning_rate": 5.2863319169211295e-06, + "loss": 0.9036, + "step": 5876 + }, + { + "epoch": 0.99, + "learning_rate": 5.281544005650936e-06, + "loss": 0.8626, + "step": 5877 + }, + { + "epoch": 0.99, + "learning_rate": 5.276757485430881e-06, + "loss": 0.9092, + "step": 5878 + }, + { + "epoch": 0.99, + "learning_rate": 5.271972357672086e-06, + "loss": 0.8716, + "step": 5879 + }, + { + "epoch": 0.99, + "learning_rate": 5.267188623785258e-06, + "loss": 0.9078, + "step": 5880 + }, + { + "epoch": 0.99, + "learning_rate": 5.26240628518069e-06, + "loss": 0.8853, + "step": 5881 + }, + { + "epoch": 0.99, + "learning_rate": 5.257625343268264e-06, + "loss": 0.9027, + "step": 5882 + }, + { + "epoch": 0.99, + "learning_rate": 5.25284579945745e-06, + "loss": 0.8513, + "step": 5883 + }, + { + "epoch": 0.99, + "learning_rate": 5.248067655157314e-06, + "loss": 0.8635, + "step": 5884 + }, + { + "epoch": 0.99, + "learning_rate": 5.243290911776497e-06, + "loss": 0.8535, + "step": 5885 + }, + { + "epoch": 0.99, + "learning_rate": 5.238515570723235e-06, + "loss": 0.9532, + "step": 5886 + }, + { + 
"epoch": 0.99, + "learning_rate": 5.233741633405345e-06, + "loss": 0.8643, + "step": 5887 + }, + { + "epoch": 0.99, + "learning_rate": 5.228969101230237e-06, + "loss": 0.8687, + "step": 5888 + }, + { + "epoch": 0.99, + "learning_rate": 5.224197975604906e-06, + "loss": 0.8835, + "step": 5889 + }, + { + "epoch": 0.99, + "learning_rate": 5.2194282579359255e-06, + "loss": 0.8968, + "step": 5890 + }, + { + "epoch": 0.99, + "learning_rate": 5.214659949629458e-06, + "loss": 0.8889, + "step": 5891 + }, + { + "epoch": 0.99, + "learning_rate": 5.209893052091255e-06, + "loss": 0.8537, + "step": 5892 + }, + { + "epoch": 0.99, + "learning_rate": 5.205127566726644e-06, + "loss": 0.8737, + "step": 5893 + }, + { + "epoch": 0.99, + "learning_rate": 5.200363494940541e-06, + "loss": 0.895, + "step": 5894 + }, + { + "epoch": 0.99, + "learning_rate": 5.19560083813744e-06, + "loss": 0.8321, + "step": 5895 + }, + { + "epoch": 0.99, + "learning_rate": 5.190839597721426e-06, + "loss": 0.9092, + "step": 5896 + }, + { + "epoch": 0.99, + "learning_rate": 5.1860797750961656e-06, + "loss": 0.3371, + "step": 5897 + }, + { + "epoch": 0.99, + "learning_rate": 5.181321371664901e-06, + "loss": 0.8638, + "step": 5898 + }, + { + "epoch": 0.99, + "learning_rate": 5.17656438883046e-06, + "loss": 0.9093, + "step": 5899 + }, + { + "epoch": 0.99, + "learning_rate": 5.171808827995245e-06, + "loss": 0.8604, + "step": 5900 + } + ], + "max_steps": 5965, + "num_train_epochs": 1, + "total_flos": 2.225173262932692e+19, + "trial_name": null, + "trial_params": null +} diff --git a/training_args.bin b/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..14ac429c07428bd663e096f7d7bc42b9e6e45897 --- /dev/null +++ b/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bd91608fa1c67901a21f7495e52f38c1f455da7e3bd64c792a078e48b68c9b47 +size 5499