Add files using upload-large-folder tool
(This view is limited to 50 files because the commit contains too many changes; see the raw diff for the full file list.)
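For reference, the "upload-large-folder" tool named in the commit title is exposed through the huggingface_hub Python client. A minimal sketch, assuming you are already authenticated (the repo id and local folder below are placeholders, not values taken from this commit):

    from huggingface_hub import HfApi

    api = HfApi()  # assumes prior login, e.g. via `huggingface-cli login`
    api.upload_large_folder(
        repo_id="your-username/your-model",    # placeholder
        folder_path="outputs/qwen2_5_vl_sft",  # placeholder
        repo_type="model",
    )

upload_large_folder is designed for folders with many large files and can resume if the upload is interrupted, which is why large checkpoint trees like this one often land as a single generated commit.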
- .gitattributes +6 -0
- added_tokens.json +24 -0
- chat_template.json +3 -0
- checkpoint-1225/added_tokens.json +24 -0
- checkpoint-1225/chat_template.json +3 -0
- checkpoint-1225/config.json +50 -0
- checkpoint-1225/generation_config.json +14 -0
- checkpoint-1225/merges.txt +0 -0
- checkpoint-1225/model-00001-of-00004.safetensors +3 -0
- checkpoint-1225/model-00002-of-00004.safetensors +3 -0
- checkpoint-1225/model-00003-of-00004.safetensors +3 -0
- checkpoint-1225/model-00004-of-00004.safetensors +3 -0
- checkpoint-1225/model.safetensors.index.json +736 -0
- checkpoint-1225/preprocessor_config.json +29 -0
- checkpoint-1225/special_tokens_map.json +31 -0
- checkpoint-1225/tokenizer.json +3 -0
- checkpoint-1225/tokenizer_config.json +209 -0
- checkpoint-1225/trainer_state.json +0 -0
- checkpoint-1225/training_args.bin +3 -0
- checkpoint-1225/vocab.json +0 -0
- checkpoint-246/added_tokens.json +24 -0
- checkpoint-246/chat_template.json +3 -0
- checkpoint-246/config.json +50 -0
- checkpoint-246/generation_config.json +14 -0
- checkpoint-246/merges.txt +0 -0
- checkpoint-246/model-00001-of-00004.safetensors +3 -0
- checkpoint-246/model-00002-of-00004.safetensors +3 -0
- checkpoint-246/model-00003-of-00004.safetensors +3 -0
- checkpoint-246/model-00004-of-00004.safetensors +3 -0
- checkpoint-246/model.safetensors.index.json +736 -0
- checkpoint-246/preprocessor_config.json +29 -0
- checkpoint-246/special_tokens_map.json +31 -0
- checkpoint-246/tokenizer.json +3 -0
- checkpoint-246/tokenizer_config.json +209 -0
- checkpoint-246/trainer_state.json +1755 -0
- checkpoint-246/training_args.bin +3 -0
- checkpoint-246/vocab.json +0 -0
- checkpoint-492/added_tokens.json +24 -0
- checkpoint-492/chat_template.json +3 -0
- checkpoint-492/config.json +50 -0
- checkpoint-492/generation_config.json +14 -0
- checkpoint-492/merges.txt +0 -0
- checkpoint-492/model-00001-of-00004.safetensors +3 -0
- checkpoint-492/model-00002-of-00004.safetensors +3 -0
- checkpoint-492/model-00003-of-00004.safetensors +3 -0
- checkpoint-492/model-00004-of-00004.safetensors +3 -0
- checkpoint-492/model.safetensors.index.json +736 -0
- checkpoint-492/preprocessor_config.json +29 -0
- checkpoint-492/special_tokens_map.json +31 -0
- checkpoint-492/tokenizer.json +3 -0
    	
        .gitattributes
CHANGED

@@ -33,3 +33,9 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-984/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-1225/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-492/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-738/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-246/tokenizer.json filter=lfs diff=lfs merge=lfs -text
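The six added rules route every listed tokenizer.json through Git LFS, so git stores a small pointer file in place of the multi-megabyte JSON. A rough sketch of checking a path against these rules (fnmatch only approximates git's attribute-pattern syntax, though exact-path rules like these match literally):

    import fnmatch

    lfs_patterns = ["tokenizer.json", "checkpoint-1225/tokenizer.json", "checkpoint-246/tokenizer.json"]
    for path in ["tokenizer.json", "checkpoint-246/tokenizer.json", "config.json"]:
        tracked = any(fnmatch.fnmatch(path, pat) for pat in lfs_patterns)
        print(path, "-> LFS pointer" if tracked else "-> stored in git")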
    	
        added_tokens.json
ADDED

@@ -0,0 +1,24 @@
+{
+  "</tool_call>": 151658,
+  "<tool_call>": 151657,
+  "<|box_end|>": 151649,
+  "<|box_start|>": 151648,
+  "<|endoftext|>": 151643,
+  "<|file_sep|>": 151664,
+  "<|fim_middle|>": 151660,
+  "<|fim_pad|>": 151662,
+  "<|fim_prefix|>": 151659,
+  "<|fim_suffix|>": 151661,
+  "<|im_end|>": 151645,
+  "<|im_start|>": 151644,
+  "<|image_pad|>": 151655,
+  "<|object_ref_end|>": 151647,
+  "<|object_ref_start|>": 151646,
+  "<|quad_end|>": 151651,
+  "<|quad_start|>": 151650,
+  "<|repo_name|>": 151663,
+  "<|video_pad|>": 151656,
+  "<|vision_end|>": 151653,
+  "<|vision_pad|>": 151654,
+  "<|vision_start|>": 151652
+}
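These 24 entries are Qwen's chat, tool-call, and vision control tokens at the top of the vocabulary. A quick sanity check against the IDs above, assuming a local clone of this repo at a placeholder path:

    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("path/to/checkpoint")  # placeholder path
    assert tok.convert_tokens_to_ids("<|im_end|>") == 151645
    assert tok.convert_tokens_to_ids("<|vision_start|>") == 151652
    print(tok.convert_ids_to_tokens(151644))  # "<|im_start|>"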
    	
        chat_template.json
ADDED

@@ -0,0 +1,3 @@
+{
+  "chat_template": "{% set image_count = namespace(value=0) %}{% set video_count = namespace(value=0) %}{% for message in messages %}{% if loop.first and message['role'] != 'system' %}<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n{% endif %}<|im_start|>{{ message['role'] }}\n{% if message['content'] is string %}{{ message['content'] }}<|im_end|>\n{% else %}{% for content in message['content'] %}{% if content['type'] == 'image' or 'image' in content or 'image_url' in content %}{% set image_count.value = image_count.value + 1 %}{% if add_vision_id %}Picture {{ image_count.value }}: {% endif %}<|vision_start|><|image_pad|><|vision_end|>{% elif content['type'] == 'video' or 'video' in content %}{% set video_count.value = video_count.value + 1 %}{% if add_vision_id %}Video {{ video_count.value }}: {% endif %}<|vision_start|><|video_pad|><|vision_end|>{% elif 'text' in content %}{{ content['text'] }}{% endif %}{% endfor %}<|im_end|>\n{% endif %}{% endfor %}{% if add_generation_prompt %}<|im_start|>assistant\n{% endif %}"
+}
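This Jinja template wraps each message in <|im_start|>/<|im_end|> markers, injects a default system prompt when none is supplied, and replaces each image or video entry with <|vision_start|><|image_pad|><|vision_end|> (or the video equivalent). A sketch of rendering it through transformers, with placeholder paths:

    from transformers import AutoProcessor

    processor = AutoProcessor.from_pretrained("path/to/checkpoint")  # placeholder path
    messages = [{
        "role": "user",
        "content": [
            {"type": "image", "image": "example.jpg"},  # placeholder image
            {"type": "text", "text": "Describe this picture."},
        ],
    }]
    text = processor.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
    print(text)
    # Expected shape per the template: a default system turn, then
    # <|im_start|>user\n<|vision_start|><|image_pad|><|vision_end|>Describe this picture.<|im_end|>\n<|im_start|>assistant\n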
    	
        checkpoint-1225/added_tokens.json
ADDED

@@ -0,0 +1,24 @@
+{
+  "</tool_call>": 151658,
+  "<tool_call>": 151657,
+  "<|box_end|>": 151649,
+  "<|box_start|>": 151648,
+  "<|endoftext|>": 151643,
+  "<|file_sep|>": 151664,
+  "<|fim_middle|>": 151660,
+  "<|fim_pad|>": 151662,
+  "<|fim_prefix|>": 151659,
+  "<|fim_suffix|>": 151661,
+  "<|im_end|>": 151645,
+  "<|im_start|>": 151644,
+  "<|image_pad|>": 151655,
+  "<|object_ref_end|>": 151647,
+  "<|object_ref_start|>": 151646,
+  "<|quad_end|>": 151651,
+  "<|quad_start|>": 151650,
+  "<|repo_name|>": 151663,
+  "<|video_pad|>": 151656,
+  "<|vision_end|>": 151653,
+  "<|vision_pad|>": 151654,
+  "<|vision_start|>": 151652
+}
    	
        checkpoint-1225/chat_template.json
ADDED

@@ -0,0 +1,3 @@
+{
+  "chat_template": "{% set image_count = namespace(value=0) %}{% set video_count = namespace(value=0) %}{% for message in messages %}{% if loop.first and message['role'] != 'system' %}<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n{% endif %}<|im_start|>{{ message['role'] }}\n{% if message['content'] is string %}{{ message['content'] }}<|im_end|>\n{% else %}{% for content in message['content'] %}{% if content['type'] == 'image' or 'image' in content or 'image_url' in content %}{% set image_count.value = image_count.value + 1 %}{% if add_vision_id %}Picture {{ image_count.value }}: {% endif %}<|vision_start|><|image_pad|><|vision_end|>{% elif content['type'] == 'video' or 'video' in content %}{% set video_count.value = video_count.value + 1 %}{% if add_vision_id %}Video {{ video_count.value }}: {% endif %}<|vision_start|><|video_pad|><|vision_end|>{% elif 'text' in content %}{{ content['text'] }}{% endif %}{% endfor %}<|im_end|>\n{% endif %}{% endfor %}{% if add_generation_prompt %}<|im_start|>assistant\n{% endif %}"
+}
    	
        checkpoint-1225/config.json
ADDED

@@ -0,0 +1,50 @@
+{
+  "_name_or_path": "/home/ma-user/work/haozhe/muze/models/Qwen2.5-VL-7B-Instruct",
+  "architectures": [
+    "Qwen2_5_VLForConditionalGeneration"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "eos_token_id": 151645,
+  "hidden_act": "silu",
+  "hidden_size": 3584,
+  "image_token_id": 151655,
+  "initializer_range": 0.02,
+  "intermediate_size": 18944,
+  "max_position_embeddings": 128000,
+  "max_window_layers": 28,
+  "model_type": "qwen2_5_vl",
+  "num_attention_heads": 28,
+  "num_hidden_layers": 28,
+  "num_key_value_heads": 4,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": {
+    "mrope_section": [
+      16,
+      24,
+      24
+    ],
+    "rope_type": "default",
+    "type": "default"
+  },
+  "rope_theta": 1000000.0,
+  "sliding_window": 32768,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.50.0.dev0",
+  "use_cache": false,
+  "use_sliding_window": false,
+  "video_token_id": 151656,
+  "vision_config": {
+    "hidden_size": 1280,
+    "in_chans": 3,
+    "model_type": "qwen2_5_vl",
+    "spatial_patch_size": 14,
+    "tokens_per_second": 2,
+    "torch_dtype": "bfloat16"
+  },
+  "vision_end_token_id": 151653,
+  "vision_start_token_id": 151652,
+  "vision_token_id": 151654,
+  "vocab_size": 152064
+}
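The config describes a standard Qwen2.5-VL-7B-Instruct architecture (28 layers, hidden size 3584, grouped-query attention with 4 KV heads, mRoPE) fine-tuned from a local copy of the base model. A loading sketch with a placeholder path; bfloat16 matches the checkpoint's torch_dtype:

    import torch
    from transformers import Qwen2_5_VLForConditionalGeneration

    model = Qwen2_5_VLForConditionalGeneration.from_pretrained(
        "path/to/checkpoint-1225",   # placeholder local path
        torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16" above
        device_map="auto",
    )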
    	
        checkpoint-1225/generation_config.json
ADDED

@@ -0,0 +1,14 @@
+{
+  "attn_implementation": "flash_attention_2",
+  "bos_token_id": 151643,
+  "do_sample": true,
+  "eos_token_id": [
+    151645,
+    151643
+  ],
+  "pad_token_id": 151643,
+  "repetition_penalty": 1.05,
+  "temperature": 1e-06,
+  "transformers_version": "4.50.0.dev0",
+  "use_cache": false
+}
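Note that do_sample is true but temperature is 1e-06, so sampling collapses to effectively greedy decoding; the eos list lets generation stop on either <|im_end|> (151645) or <|endoftext|> (151643). A sketch of inspecting these defaults, placeholder path:

    from transformers import GenerationConfig

    gen_cfg = GenerationConfig.from_pretrained("path/to/checkpoint-1225")  # placeholder
    print(gen_cfg.temperature)   # 1e-06: near-greedy sampling
    print(gen_cfg.eos_token_id)  # [151645, 151643]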
    	
        checkpoint-1225/merges.txt
ADDED

The diff for this file is too large to render; see the raw diff.
    	
        checkpoint-1225/model-00001-of-00004.safetensors
ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c6936c79f9fe5030c7c106f0389434c62f51074133d66852ccf63031c91eaa12
+size 4968243304
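These three lines are a Git LFS pointer, not the weights themselves: the real shard (about 4.97 GB per the size field) lives in LFS storage and should hash to the oid above. A verification sketch against a locally downloaded shard (placeholder path):

    import hashlib

    h = hashlib.sha256()
    with open("checkpoint-1225/model-00001-of-00004.safetensors", "rb") as f:  # placeholder
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    print(h.hexdigest())  # expect c6936c79f9fe... and a file size of 4968243304 bytes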
    	
        checkpoint-1225/model-00002-of-00004.safetensors
ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3ffe989dbfa5fd825e670f9552972cf8a72c3af9e58a1829335f65714f89cd1b
+size 4991495816
    	
        checkpoint-1225/model-00003-of-00004.safetensors
ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:006cb3ea133162755e0203bf23be3795f38d9ee3c3128d5a68b256b311b12ed6
+size 4932751040
    	
        checkpoint-1225/model-00004-of-00004.safetensors
ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0c0b5eef4adb4c8ea0c2518ef5d6fc8927eab4d0509067f39a8a3e4d40e139d7
+size 1691924384
    	
        checkpoint-1225/model.safetensors.index.json
ADDED

@@ -0,0 +1,736 @@
+{
+  "metadata": {
+    "total_size": 16584333312
+  },
+  "weight_map": {
+    "lm_head.weight": "model-00004-of-00004.safetensors",
+    "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.16.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.16.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.16.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.16.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.17.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.17.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.17.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.17.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.17.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.17.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.17.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.17.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.17.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.17.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.input_layernorm.weight": "model-00004-of-00004.safetensors",
+    "model.layers.26.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
+    "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
+    "model.layers.26.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.input_layernorm.weight": "model-00004-of-00004.safetensors",
+    "model.layers.27.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
+    "model.layers.27.mlp.gate_proj.weight": "model-00004-of-00004.safetensors",
+    "model.layers.27.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
+    "model.layers.27.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
+    "model.layers.27.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+    "model.layers.27.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+    "model.layers.27.self_attn.o_proj.weight": "model-00004-of-00004.safetensors",
+    "model.layers.27.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+    "model.layers.27.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+    "model.layers.27.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+    "model.layers.27.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+    "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.5.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.5.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.6.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.6.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 301 | 
            +
                "model.layers.6.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 302 | 
            +
                "model.layers.6.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 303 | 
            +
                "model.layers.6.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 304 | 
            +
                "model.layers.6.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 305 | 
            +
                "model.layers.6.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 306 | 
            +
                "model.layers.6.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 307 | 
            +
                "model.layers.6.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 308 | 
            +
                "model.layers.7.input_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 309 | 
            +
                "model.layers.7.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 310 | 
            +
                "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 311 | 
            +
                "model.layers.7.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 312 | 
            +
                "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 313 | 
            +
                "model.layers.7.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 314 | 
            +
                "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 315 | 
            +
                "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 316 | 
            +
                "model.layers.7.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 317 | 
            +
                "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 318 | 
            +
                "model.layers.7.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 319 | 
            +
                "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 320 | 
            +
                "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 321 | 
            +
                "model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 322 | 
            +
                "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 323 | 
            +
                "model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 324 | 
            +
                "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 325 | 
            +
                "model.layers.8.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 326 | 
            +
                "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 327 | 
            +
                "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 328 | 
            +
                "model.layers.8.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 329 | 
            +
                "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 330 | 
            +
                "model.layers.8.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 331 | 
            +
                "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 332 | 
            +
                "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 333 | 
            +
                "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 334 | 
            +
                "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 335 | 
            +
                "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 336 | 
            +
                "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 337 | 
            +
                "model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 338 | 
            +
                "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 339 | 
            +
                "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 340 | 
            +
                "model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 341 | 
            +
                "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 342 | 
            +
                "model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 343 | 
            +
                "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 344 | 
            +
                "model.norm.weight": "model-00004-of-00004.safetensors",
         | 
| 345 | 
            +
                "visual.blocks.0.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 346 | 
            +
                "visual.blocks.0.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 347 | 
            +
                "visual.blocks.0.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 348 | 
            +
                "visual.blocks.0.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 349 | 
            +
                "visual.blocks.0.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 350 | 
            +
                "visual.blocks.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 351 | 
            +
                "visual.blocks.0.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 352 | 
            +
                "visual.blocks.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 353 | 
            +
                "visual.blocks.0.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 354 | 
            +
                "visual.blocks.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 355 | 
            +
                "visual.blocks.0.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 356 | 
            +
                "visual.blocks.0.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 357 | 
            +
                "visual.blocks.1.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 358 | 
            +
                "visual.blocks.1.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 359 | 
            +
                "visual.blocks.1.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 360 | 
            +
                "visual.blocks.1.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 361 | 
            +
                "visual.blocks.1.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 362 | 
            +
                "visual.blocks.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 363 | 
            +
                "visual.blocks.1.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 364 | 
            +
                "visual.blocks.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 365 | 
            +
                "visual.blocks.1.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 366 | 
            +
                "visual.blocks.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 367 | 
            +
                "visual.blocks.1.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 368 | 
            +
                "visual.blocks.1.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 369 | 
            +
                "visual.blocks.10.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 370 | 
            +
                "visual.blocks.10.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 371 | 
            +
                "visual.blocks.10.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 372 | 
            +
                "visual.blocks.10.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 373 | 
            +
                "visual.blocks.10.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 374 | 
            +
                "visual.blocks.10.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 375 | 
            +
                "visual.blocks.10.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 376 | 
            +
                "visual.blocks.10.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 377 | 
            +
                "visual.blocks.10.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 378 | 
            +
                "visual.blocks.10.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 379 | 
            +
                "visual.blocks.10.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 380 | 
            +
                "visual.blocks.10.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 381 | 
            +
                "visual.blocks.11.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 382 | 
            +
                "visual.blocks.11.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 383 | 
            +
                "visual.blocks.11.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 384 | 
            +
                "visual.blocks.11.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 385 | 
            +
                "visual.blocks.11.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 386 | 
            +
                "visual.blocks.11.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 387 | 
            +
                "visual.blocks.11.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 388 | 
            +
                "visual.blocks.11.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 389 | 
            +
                "visual.blocks.11.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 390 | 
            +
                "visual.blocks.11.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 391 | 
            +
                "visual.blocks.11.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 392 | 
            +
                "visual.blocks.11.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 393 | 
            +
                "visual.blocks.12.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 394 | 
            +
                "visual.blocks.12.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 395 | 
            +
                "visual.blocks.12.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 396 | 
            +
                "visual.blocks.12.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 397 | 
            +
                "visual.blocks.12.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 398 | 
            +
                "visual.blocks.12.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 399 | 
            +
                "visual.blocks.12.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 400 | 
            +
                "visual.blocks.12.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 401 | 
            +
                "visual.blocks.12.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 402 | 
            +
                "visual.blocks.12.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 403 | 
            +
                "visual.blocks.12.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 404 | 
            +
                "visual.blocks.12.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 405 | 
            +
                "visual.blocks.13.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 406 | 
            +
                "visual.blocks.13.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 407 | 
            +
                "visual.blocks.13.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 408 | 
            +
                "visual.blocks.13.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 409 | 
            +
                "visual.blocks.13.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 410 | 
            +
                "visual.blocks.13.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 411 | 
            +
                "visual.blocks.13.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 412 | 
            +
                "visual.blocks.13.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 413 | 
            +
                "visual.blocks.13.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 414 | 
            +
                "visual.blocks.13.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 415 | 
            +
                "visual.blocks.13.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 416 | 
            +
                "visual.blocks.13.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 417 | 
            +
                "visual.blocks.14.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 418 | 
            +
                "visual.blocks.14.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 419 | 
            +
                "visual.blocks.14.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 420 | 
            +
                "visual.blocks.14.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 421 | 
            +
                "visual.blocks.14.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 422 | 
            +
                "visual.blocks.14.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 423 | 
            +
                "visual.blocks.14.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 424 | 
            +
                "visual.blocks.14.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 425 | 
            +
                "visual.blocks.14.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 426 | 
            +
                "visual.blocks.14.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 427 | 
            +
                "visual.blocks.14.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 428 | 
            +
                "visual.blocks.14.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 429 | 
            +
                "visual.blocks.15.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 430 | 
            +
                "visual.blocks.15.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 431 | 
            +
                "visual.blocks.15.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 432 | 
            +
                "visual.blocks.15.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 433 | 
            +
                "visual.blocks.15.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 434 | 
            +
                "visual.blocks.15.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 435 | 
            +
                "visual.blocks.15.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 436 | 
            +
                "visual.blocks.15.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 437 | 
            +
                "visual.blocks.15.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 438 | 
            +
                "visual.blocks.15.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 439 | 
            +
                "visual.blocks.15.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 440 | 
            +
                "visual.blocks.15.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 441 | 
            +
                "visual.blocks.16.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 442 | 
            +
                "visual.blocks.16.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 443 | 
            +
                "visual.blocks.16.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 444 | 
            +
                "visual.blocks.16.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 445 | 
            +
                "visual.blocks.16.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 446 | 
            +
                "visual.blocks.16.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 447 | 
            +
                "visual.blocks.16.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 448 | 
            +
                "visual.blocks.16.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 449 | 
            +
                "visual.blocks.16.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 450 | 
            +
                "visual.blocks.16.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 451 | 
            +
                "visual.blocks.16.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 452 | 
            +
                "visual.blocks.16.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 453 | 
            +
                "visual.blocks.17.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 454 | 
            +
                "visual.blocks.17.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 455 | 
            +
                "visual.blocks.17.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 456 | 
            +
                "visual.blocks.17.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 457 | 
            +
                "visual.blocks.17.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 458 | 
            +
                "visual.blocks.17.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 459 | 
            +
                "visual.blocks.17.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 460 | 
            +
                "visual.blocks.17.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 461 | 
            +
                "visual.blocks.17.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 462 | 
            +
                "visual.blocks.17.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 463 | 
            +
                "visual.blocks.17.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 464 | 
            +
                "visual.blocks.17.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 465 | 
            +
                "visual.blocks.18.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 466 | 
            +
                "visual.blocks.18.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 467 | 
            +
                "visual.blocks.18.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 468 | 
            +
                "visual.blocks.18.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 469 | 
            +
                "visual.blocks.18.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 470 | 
            +
                "visual.blocks.18.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 471 | 
            +
                "visual.blocks.18.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 472 | 
            +
                "visual.blocks.18.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 473 | 
            +
                "visual.blocks.18.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 474 | 
            +
                "visual.blocks.18.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 475 | 
            +
                "visual.blocks.18.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 476 | 
            +
                "visual.blocks.18.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 477 | 
            +
                "visual.blocks.19.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 478 | 
            +
                "visual.blocks.19.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 479 | 
            +
                "visual.blocks.19.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 480 | 
            +
                "visual.blocks.19.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 481 | 
            +
                "visual.blocks.19.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 482 | 
            +
                "visual.blocks.19.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 483 | 
            +
                "visual.blocks.19.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 484 | 
            +
                "visual.blocks.19.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 485 | 
            +
                "visual.blocks.19.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 486 | 
            +
                "visual.blocks.19.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 487 | 
            +
                "visual.blocks.19.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 488 | 
            +
                "visual.blocks.19.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 489 | 
            +
                "visual.blocks.2.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 490 | 
            +
                "visual.blocks.2.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 491 | 
            +
                "visual.blocks.2.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 492 | 
            +
                "visual.blocks.2.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 493 | 
            +
                "visual.blocks.2.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 494 | 
            +
                "visual.blocks.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 495 | 
            +
                "visual.blocks.2.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 496 | 
            +
                "visual.blocks.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 497 | 
            +
                "visual.blocks.2.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 498 | 
            +
                "visual.blocks.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 499 | 
            +
                "visual.blocks.2.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 500 | 
            +
                "visual.blocks.2.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 501 | 
            +
                "visual.blocks.20.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 502 | 
            +
                "visual.blocks.20.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 503 | 
            +
                "visual.blocks.20.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 504 | 
            +
                "visual.blocks.20.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 505 | 
            +
                "visual.blocks.20.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 506 | 
            +
                "visual.blocks.20.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 507 | 
            +
                "visual.blocks.20.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 508 | 
            +
                "visual.blocks.20.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 509 | 
            +
                "visual.blocks.20.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 510 | 
            +
                "visual.blocks.20.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 511 | 
            +
                "visual.blocks.20.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 512 | 
            +
                "visual.blocks.20.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 513 | 
            +
                "visual.blocks.21.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 514 | 
            +
                "visual.blocks.21.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 515 | 
            +
                "visual.blocks.21.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 516 | 
            +
                "visual.blocks.21.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 517 | 
            +
                "visual.blocks.21.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 518 | 
            +
                "visual.blocks.21.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 519 | 
            +
                "visual.blocks.21.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 520 | 
            +
                "visual.blocks.21.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 521 | 
            +
                "visual.blocks.21.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 522 | 
            +
                "visual.blocks.21.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 523 | 
            +
                "visual.blocks.21.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 524 | 
            +
                "visual.blocks.21.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 525 | 
            +
                "visual.blocks.22.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 526 | 
            +
                "visual.blocks.22.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 527 | 
            +
                "visual.blocks.22.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 528 | 
            +
                "visual.blocks.22.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 529 | 
            +
                "visual.blocks.22.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 530 | 
            +
                "visual.blocks.22.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 531 | 
            +
                "visual.blocks.22.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 532 | 
            +
                "visual.blocks.22.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 533 | 
            +
                "visual.blocks.22.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 534 | 
            +
                "visual.blocks.22.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 535 | 
            +
                "visual.blocks.22.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 536 | 
            +
                "visual.blocks.22.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 537 | 
            +
                "visual.blocks.23.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 538 | 
            +
                "visual.blocks.23.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 539 | 
            +
                "visual.blocks.23.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 540 | 
            +
                "visual.blocks.23.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 541 | 
            +
                "visual.blocks.23.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 542 | 
            +
                "visual.blocks.23.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 543 | 
            +
                "visual.blocks.23.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 544 | 
            +
                "visual.blocks.23.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 545 | 
            +
                "visual.blocks.23.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 546 | 
            +
                "visual.blocks.23.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 547 | 
            +
                "visual.blocks.23.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 548 | 
            +
                "visual.blocks.23.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 549 | 
            +
                "visual.blocks.24.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 550 | 
            +
                "visual.blocks.24.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 551 | 
            +
                "visual.blocks.24.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 552 | 
            +
                "visual.blocks.24.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 553 | 
            +
                "visual.blocks.24.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 554 | 
            +
                "visual.blocks.24.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 555 | 
            +
                "visual.blocks.24.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 556 | 
            +
                "visual.blocks.24.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 557 | 
            +
                "visual.blocks.24.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 558 | 
            +
                "visual.blocks.24.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 559 | 
            +
                "visual.blocks.24.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 560 | 
            +
                "visual.blocks.24.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 561 | 
            +
                "visual.blocks.25.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 562 | 
            +
                "visual.blocks.25.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 563 | 
            +
                "visual.blocks.25.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 564 | 
            +
                "visual.blocks.25.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 565 | 
            +
                "visual.blocks.25.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 566 | 
            +
                "visual.blocks.25.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 567 | 
            +
                "visual.blocks.25.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 568 | 
            +
                "visual.blocks.25.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 569 | 
            +
                "visual.blocks.25.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 570 | 
            +
                "visual.blocks.25.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 571 | 
            +
                "visual.blocks.25.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 572 | 
            +
                "visual.blocks.25.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 573 | 
            +
                "visual.blocks.26.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 574 | 
            +
                "visual.blocks.26.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 575 | 
            +
                "visual.blocks.26.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 576 | 
            +
                "visual.blocks.26.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 577 | 
            +
                "visual.blocks.26.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 578 | 
            +
                "visual.blocks.26.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 579 | 
            +
                "visual.blocks.26.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 580 | 
            +
                "visual.blocks.26.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 581 | 
            +
                "visual.blocks.26.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 582 | 
            +
                "visual.blocks.26.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 583 | 
            +
                "visual.blocks.26.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 584 | 
            +
                "visual.blocks.26.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 585 | 
            +
                "visual.blocks.27.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 586 | 
            +
                "visual.blocks.27.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 587 | 
            +
                "visual.blocks.27.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 588 | 
            +
                "visual.blocks.27.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 589 | 
            +
                "visual.blocks.27.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 590 | 
            +
                "visual.blocks.27.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 591 | 
            +
                "visual.blocks.27.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 592 | 
            +
                "visual.blocks.27.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 593 | 
            +
                "visual.blocks.27.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 594 | 
            +
                "visual.blocks.27.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 595 | 
            +
                "visual.blocks.27.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 596 | 
            +
                "visual.blocks.27.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 597 | 
            +
                "visual.blocks.28.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 598 | 
            +
                "visual.blocks.28.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 599 | 
            +
                "visual.blocks.28.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 600 | 
            +
                "visual.blocks.28.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 601 | 
            +
                "visual.blocks.28.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 602 | 
            +
                "visual.blocks.28.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 603 | 
            +
                "visual.blocks.28.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 604 | 
            +
                "visual.blocks.28.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 605 | 
            +
                "visual.blocks.28.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 606 | 
            +
                "visual.blocks.28.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 607 | 
            +
                "visual.blocks.28.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 608 | 
            +
                "visual.blocks.28.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 609 | 
            +
                "visual.blocks.29.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 610 | 
            +
                "visual.blocks.29.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 611 | 
            +
                "visual.blocks.29.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 612 | 
            +
                "visual.blocks.29.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 613 | 
            +
                "visual.blocks.29.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 614 | 
            +
                "visual.blocks.29.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 615 | 
            +
                "visual.blocks.29.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 616 | 
            +
                "visual.blocks.29.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 617 | 
            +
                "visual.blocks.29.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 618 | 
            +
                "visual.blocks.29.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 619 | 
            +
                "visual.blocks.29.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 620 | 
            +
                "visual.blocks.29.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 621 | 
            +
                "visual.blocks.3.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 622 | 
            +
                "visual.blocks.3.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 623 | 
            +
                "visual.blocks.3.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 624 | 
            +
                "visual.blocks.3.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 625 | 
            +
                "visual.blocks.3.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 626 | 
            +
                "visual.blocks.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 627 | 
            +
                "visual.blocks.3.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 628 | 
            +
                "visual.blocks.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 629 | 
            +
                "visual.blocks.3.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 630 | 
            +
                "visual.blocks.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 631 | 
            +
                "visual.blocks.3.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 632 | 
            +
                "visual.blocks.3.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 633 | 
            +
                "visual.blocks.30.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 634 | 
            +
                "visual.blocks.30.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 635 | 
            +
                "visual.blocks.30.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 636 | 
            +
                "visual.blocks.30.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 637 | 
            +
                "visual.blocks.30.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 638 | 
            +
                "visual.blocks.30.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 639 | 
            +
                "visual.blocks.30.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 640 | 
            +
                "visual.blocks.30.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 641 | 
            +
                "visual.blocks.30.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 642 | 
            +
                "visual.blocks.30.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 643 | 
            +
                "visual.blocks.30.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 644 | 
            +
                "visual.blocks.30.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 645 | 
            +
                "visual.blocks.31.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 646 | 
            +
                "visual.blocks.31.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 647 | 
            +
                "visual.blocks.31.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 648 | 
            +
                "visual.blocks.31.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 649 | 
            +
                "visual.blocks.31.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 650 | 
            +
                "visual.blocks.31.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 651 | 
            +
                "visual.blocks.31.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 652 | 
            +
                "visual.blocks.31.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 653 | 
            +
                "visual.blocks.31.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 654 | 
            +
                "visual.blocks.31.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 655 | 
            +
                "visual.blocks.31.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 656 | 
            +
                "visual.blocks.31.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 657 | 
            +
                "visual.blocks.4.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 658 | 
            +
                "visual.blocks.4.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 659 | 
            +
                "visual.blocks.4.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 660 | 
            +
                "visual.blocks.4.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 661 | 
            +
                "visual.blocks.4.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 662 | 
            +
                "visual.blocks.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 663 | 
            +
                "visual.blocks.4.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 664 | 
            +
                "visual.blocks.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 665 | 
            +
                "visual.blocks.4.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 666 | 
            +
                "visual.blocks.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 667 | 
            +
                "visual.blocks.4.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 668 | 
            +
                "visual.blocks.4.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 669 | 
            +
                "visual.blocks.5.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 670 | 
            +
                "visual.blocks.5.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 671 | 
            +
                "visual.blocks.5.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 672 | 
            +
                "visual.blocks.5.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 673 | 
            +
                "visual.blocks.5.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 674 | 
            +
                "visual.blocks.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 675 | 
            +
                "visual.blocks.5.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 676 | 
            +
                "visual.blocks.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 677 | 
            +
                "visual.blocks.5.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 678 | 
            +
                "visual.blocks.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 679 | 
            +
                "visual.blocks.5.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 680 | 
            +
                "visual.blocks.5.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 681 | 
            +
                "visual.blocks.6.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 682 | 
            +
                "visual.blocks.6.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 683 | 
            +
                "visual.blocks.6.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 684 | 
            +
                "visual.blocks.6.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 685 | 
            +
                "visual.blocks.6.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 686 | 
            +
                "visual.blocks.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 687 | 
            +
                "visual.blocks.6.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 688 | 
            +
                "visual.blocks.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 689 | 
            +
                "visual.blocks.6.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 690 | 
            +
                "visual.blocks.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 691 | 
            +
                "visual.blocks.6.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 692 | 
            +
                "visual.blocks.6.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 693 | 
            +
                "visual.blocks.7.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 694 | 
            +
                "visual.blocks.7.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 695 | 
            +
                "visual.blocks.7.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 696 | 
            +
                "visual.blocks.7.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 697 | 
            +
                "visual.blocks.7.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 698 | 
            +
                "visual.blocks.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 699 | 
            +
                "visual.blocks.7.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 700 | 
            +
                "visual.blocks.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 701 | 
            +
                "visual.blocks.7.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 702 | 
            +
                "visual.blocks.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 703 | 
            +
                "visual.blocks.7.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 704 | 
            +
                "visual.blocks.7.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 705 | 
            +
                "visual.blocks.8.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 706 | 
            +
                "visual.blocks.8.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 707 | 
            +
                "visual.blocks.8.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 708 | 
            +
                "visual.blocks.8.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 709 | 
            +
                "visual.blocks.8.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 710 | 
            +
                "visual.blocks.8.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 711 | 
            +
                "visual.blocks.8.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 712 | 
            +
                "visual.blocks.8.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 713 | 
            +
                "visual.blocks.8.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 714 | 
            +
                "visual.blocks.8.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 715 | 
            +
                "visual.blocks.8.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 716 | 
            +
                "visual.blocks.8.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 717 | 
            +
                "visual.blocks.9.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 718 | 
            +
                "visual.blocks.9.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 719 | 
            +
                "visual.blocks.9.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 720 | 
            +
                "visual.blocks.9.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 721 | 
            +
                "visual.blocks.9.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 722 | 
            +
                "visual.blocks.9.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 723 | 
            +
                "visual.blocks.9.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 724 | 
            +
                "visual.blocks.9.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 725 | 
            +
                "visual.blocks.9.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 726 | 
            +
                "visual.blocks.9.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 727 | 
            +
                "visual.blocks.9.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 728 | 
            +
                "visual.blocks.9.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 729 | 
            +
                "visual.merger.ln_q.weight": "model-00001-of-00004.safetensors",
         | 
| 730 | 
            +
                "visual.merger.mlp.0.bias": "model-00001-of-00004.safetensors",
         | 
| 731 | 
            +
                "visual.merger.mlp.0.weight": "model-00001-of-00004.safetensors",
         | 
| 732 | 
            +
                "visual.merger.mlp.2.bias": "model-00001-of-00004.safetensors",
         | 
| 733 | 
            +
                "visual.merger.mlp.2.weight": "model-00001-of-00004.safetensors",
         | 
| 734 | 
            +
                "visual.patch_embed.proj.weight": "model-00001-of-00004.safetensors"
         | 
| 735 | 
            +
              }
         | 
| 736 | 
            +
            }
         | 
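This closes out the sharded index for checkpoint-1225: every tensor name maps to one of the four safetensors shards. As a minimal sketch of how that map is consumed (hypothetical local paths; `safe_open` from the `safetensors` package is the usual reader):

```python
import json
from safetensors import safe_open

# Resolve which shard holds a tensor, then read just that tensor.
# Paths assume the checkpoint directory was downloaded locally.
with open("checkpoint-1225/model.safetensors.index.json") as f:
    index = json.load(f)

name = "visual.patch_embed.proj.weight"
shard = index["weight_map"][name]  # -> "model-00001-of-00004.safetensors"
with safe_open(f"checkpoint-1225/{shard}", framework="pt") as sf:
    tensor = sf.get_tensor(name)
print(tensor.shape, tensor.dtype)
```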
    	
checkpoint-1225/preprocessor_config.json
ADDED
@@ -0,0 +1,29 @@
+{
+  "do_convert_rgb": true,
+  "do_normalize": true,
+  "do_rescale": true,
+  "do_resize": true,
+  "image_mean": [
+    0.48145466,
+    0.4578275,
+    0.40821073
+  ],
+  "image_processor_type": "Qwen2VLImageProcessor",
+  "image_std": [
+    0.26862954,
+    0.26130258,
+    0.27577711
+  ],
+  "max_pixels": 4014080,
+  "merge_size": 2,
+  "min_pixels": 3136,
+  "patch_size": 14,
+  "processor_class": "Qwen2_5_VLProcessor",
+  "resample": 3,
+  "rescale_factor": 0.00392156862745098,
+  "size": {
+    "longest_edge": 12845056,
+    "shortest_edge": 3136
+  },
+  "temporal_patch_size": 2
+}
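These fields pin down the vision front end: 14x14 patches merged 2x2, so image sides snap to multiples of 28, with the total pixel count kept between min_pixels and max_pixels. A rough sketch of the resize rule these fields imply, mirroring Qwen2-VL-style smart resizing rather than the exact library code:

```python
import math

# Values taken from the preprocessor_config.json above.
PATCH, MERGE = 14, 2
FACTOR = PATCH * MERGE           # both sides snap to multiples of 28
MIN_PIXELS, MAX_PIXELS = 3136, 4014080

def smart_resize(h: int, w: int) -> tuple[int, int]:
    """Sketch of the resize rule implied by min/max_pixels and patch/merge size."""
    h_bar = round(h / FACTOR) * FACTOR
    w_bar = round(w / FACTOR) * FACTOR
    if h_bar * w_bar > MAX_PIXELS:
        beta = math.sqrt((h * w) / MAX_PIXELS)
        h_bar = math.floor(h / beta / FACTOR) * FACTOR
        w_bar = math.floor(w / beta / FACTOR) * FACTOR
    elif h_bar * w_bar < MIN_PIXELS:
        beta = math.sqrt(MIN_PIXELS / (h * w))
        h_bar = math.ceil(h * beta / FACTOR) * FACTOR
        w_bar = math.ceil(w * beta / FACTOR) * FACTOR
    return h_bar, w_bar

h, w = smart_resize(1080, 1920)          # a 1080p frame -> (1092, 1932)
print((h // FACTOR) * (w // FACTOR))     # 2691 merged vision tokens
```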
    	
checkpoint-1225/special_tokens_map.json
ADDED
@@ -0,0 +1,31 @@
+{
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "eos_token": {
+    "content": "<|im_end|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
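A quick sanity check that this mapping round-trips through the loaded tokenizer (hypothetical local path):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("checkpoint-1225")
assert tok.eos_token == "<|im_end|>"
assert tok.pad_token == "<|endoftext|>"
assert "<|image_pad|>" in tok.additional_special_tokens
```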
    	
checkpoint-1225/tokenizer.json
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
+size 11421896
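This is a Git LFS pointer, not the tokenizer itself; the ~11 MB blob is fetched on checkout. One way to confirm a downloaded blob matches the pointer (hypothetical local path):

```python
import hashlib

# Expected digest, copied from the LFS pointer above.
EXPECTED = "9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa"

h = hashlib.sha256()
with open("checkpoint-1225/tokenizer.json", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
assert h.hexdigest() == EXPECTED, "tokenizer.json does not match its LFS pointer"
```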
    	
checkpoint-1225/tokenizer_config.json
ADDED
@@ -0,0 +1,209 @@
+{
+  "add_bos_token": false,
+  "add_prefix_space": false,
+  "added_tokens_decoder": {
+    "151643": {
+      "content": "<|endoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151644": {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151645": {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151646": {
+      "content": "<|object_ref_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151647": {
+      "content": "<|object_ref_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151648": {
+      "content": "<|box_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151649": {
+      "content": "<|box_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151650": {
+      "content": "<|quad_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151651": {
+      "content": "<|quad_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151652": {
+      "content": "<|vision_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151653": {
+      "content": "<|vision_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151654": {
+      "content": "<|vision_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151655": {
+      "content": "<|image_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151656": {
+      "content": "<|video_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151657": {
+      "content": "<tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151658": {
+      "content": "</tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151659": {
+      "content": "<|fim_prefix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151660": {
+      "content": "<|fim_middle|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151661": {
+      "content": "<|fim_suffix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151662": {
+      "content": "<|fim_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151663": {
+      "content": "<|repo_name|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151664": {
+      "content": "<|file_sep|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    }
+  },
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "bos_token": null,
+  "chat_template": "{%- if tools %}\n    {{- '<|im_start|>system\\n' }}\n    {%- if messages[0]['role'] == 'system' %}\n        {{- messages[0]['content'] }}\n    {%- else %}\n        {{- 'You are a helpful assistant.' }}\n    {%- endif %}\n    {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n    {%- for tool in tools %}\n        {{- \"\\n\" }}\n        {{- tool | tojson }}\n    {%- endfor %}\n    {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n    {%- if messages[0]['role'] == 'system' %}\n        {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n    {%- else %}\n        {{- '<|im_start|>system\\nYou are a helpful assistant.<|im_end|>\\n' }}\n    {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n    {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n        {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n    {%- elif message.role == \"assistant\" %}\n        {{- '<|im_start|>' + message.role }}\n        {%- if message.content %}\n            {{- '\\n' + message.content }}\n        {%- endif %}\n        {%- for tool_call in message.tool_calls %}\n            {%- if tool_call.function is defined %}\n                {%- set tool_call = tool_call.function %}\n            {%- endif %}\n            {{- '\\n<tool_call>\\n{\"name\": \"' }}\n            {{- tool_call.name }}\n            {{- '\", \"arguments\": ' }}\n            {{- tool_call.arguments | tojson }}\n            {{- '}\\n</tool_call>' }}\n        {%- endfor %}\n        {{- '<|im_end|>\\n' }}\n    {%- elif message.role == \"tool\" %}\n        {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n            {{- '<|im_start|>user' }}\n        {%- endif %}\n        {{- '\\n<tool_response>\\n' }}\n        {{- message.content }}\n        {{- '\\n</tool_response>' }}\n        {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n            {{- '<|im_end|>\\n' }}\n        {%- endif %}\n    {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n    {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "<|im_end|>",
+  "errors": "replace",
+  "extra_special_tokens": {},
+  "model_max_length": 131072,
+  "pad_token": "<|endoftext|>",
+  "processor_class": "Qwen2_5_VLProcessor",
+  "split_special_tokens": false,
+  "tokenizer_class": "Qwen2Tokenizer",
+  "unk_token": null
+}
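The chat_template field above is what `apply_chat_template` executes; a minimal sketch of rendering a ChatML prompt with it (hypothetical local path):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("checkpoint-1225")
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Summarize this checkpoint."},
]
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# <|im_start|>system
# You are a helpful assistant.<|im_end|>
# <|im_start|>user
# Summarize this checkpoint.<|im_end|>
# <|im_start|>assistant
```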
    	
checkpoint-1225/trainer_state.json
ADDED
The diff for this file is too large to render. See raw diff.
    	
checkpoint-1225/training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d5ba1195b28d4eb5f6c01fc66fa88747aff65390ba93c1fc53707d292a8b581b
+size 7672
    	
checkpoint-1225/vocab.json
ADDED
The diff for this file is too large to render. See raw diff.
    	
checkpoint-246/added_tokens.json
ADDED
@@ -0,0 +1,24 @@
+{
+  "</tool_call>": 151658,
+  "<tool_call>": 151657,
+  "<|box_end|>": 151649,
+  "<|box_start|>": 151648,
+  "<|endoftext|>": 151643,
+  "<|file_sep|>": 151664,
+  "<|fim_middle|>": 151660,
+  "<|fim_pad|>": 151662,
+  "<|fim_prefix|>": 151659,
+  "<|fim_suffix|>": 151661,
+  "<|im_end|>": 151645,
+  "<|im_start|>": 151644,
+  "<|image_pad|>": 151655,
+  "<|object_ref_end|>": 151647,
+  "<|object_ref_start|>": 151646,
+  "<|quad_end|>": 151651,
+  "<|quad_start|>": 151650,
+  "<|repo_name|>": 151663,
+  "<|video_pad|>": 151656,
+  "<|vision_end|>": 151653,
+  "<|vision_pad|>": 151654,
+  "<|vision_start|>": 151652
+}
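All 22 extra tokens sit in one contiguous ID block directly after the base BPE vocabulary (IDs 0 through 151642). A one-line check (hypothetical local path):

```python
import json

with open("checkpoint-246/added_tokens.json") as f:
    added = json.load(f)
assert sorted(added.values()) == list(range(151643, 151665))  # 22 contiguous IDs
```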
    	
checkpoint-246/chat_template.json
ADDED
@@ -0,0 +1,3 @@
+{
+  "chat_template": "{% set image_count = namespace(value=0) %}{% set video_count = namespace(value=0) %}{% for message in messages %}{% if loop.first and message['role'] != 'system' %}<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n{% endif %}<|im_start|>{{ message['role'] }}\n{% if message['content'] is string %}{{ message['content'] }}<|im_end|>\n{% else %}{% for content in message['content'] %}{% if content['type'] == 'image' or 'image' in content or 'image_url' in content %}{% set image_count.value = image_count.value + 1 %}{% if add_vision_id %}Picture {{ image_count.value }}: {% endif %}<|vision_start|><|image_pad|><|vision_end|>{% elif content['type'] == 'video' or 'video' in content %}{% set video_count.value = video_count.value + 1 %}{% if add_vision_id %}Video {{ video_count.value }}: {% endif %}<|vision_start|><|video_pad|><|vision_end|>{% elif 'text' in content %}{{ content['text'] }}{% endif %}{% endfor %}<|im_end|>\n{% endif %}{% endfor %}{% if add_generation_prompt %}<|im_start|>assistant\n{% endif %}"
+}
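Unlike the text-only template in tokenizer_config.json, this one inserts <|vision_start|><|image_pad|><|vision_end|> for image entries in a structured message. A sketch of rendering a multimodal turn (hypothetical path; AutoProcessor should resolve to Qwen2_5_VLProcessor here):

```python
from transformers import AutoProcessor

processor = AutoProcessor.from_pretrained("checkpoint-246")
messages = [{
    "role": "user",
    "content": [
        {"type": "image"},
        {"type": "text", "text": "What is in this picture?"},
    ],
}]
prompt = processor.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
# ...<|im_start|>user\n<|vision_start|><|image_pad|><|vision_end|>What is in
# this picture?<|im_end|>\n<|im_start|>assistant\n
```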
    	
checkpoint-246/config.json
ADDED
@@ -0,0 +1,50 @@
+{
+  "_name_or_path": "/home/ma-user/work/haozhe/muze/models/Qwen2.5-VL-7B-Instruct",
+  "architectures": [
+    "Qwen2_5_VLForConditionalGeneration"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "eos_token_id": 151645,
+  "hidden_act": "silu",
+  "hidden_size": 3584,
+  "image_token_id": 151655,
+  "initializer_range": 0.02,
+  "intermediate_size": 18944,
+  "max_position_embeddings": 128000,
+  "max_window_layers": 28,
+  "model_type": "qwen2_5_vl",
+  "num_attention_heads": 28,
+  "num_hidden_layers": 28,
+  "num_key_value_heads": 4,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": {
+    "mrope_section": [
+      16,
+      24,
+      24
+    ],
+    "rope_type": "default",
+    "type": "default"
+  },
+  "rope_theta": 1000000.0,
+  "sliding_window": 32768,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.50.0.dev0",
+  "use_cache": false,
+  "use_sliding_window": false,
+  "video_token_id": 151656,
+  "vision_config": {
+    "hidden_size": 1280,
+    "in_chans": 3,
+    "model_type": "qwen2_5_vl",
+    "spatial_patch_size": 14,
+    "tokens_per_second": 2,
+    "torch_dtype": "bfloat16"
+  },
+  "vision_end_token_id": 151653,
+  "vision_start_token_id": 151652,
+  "vision_token_id": 151654,
+  "vocab_size": 152064
+}
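A few numbers worth reading off this config: 28 heads over hidden_size 3584 give head_dim 128, and with only 4 KV heads (grouped-query attention) the bf16 KV cache costs 56 KiB per token across all 28 layers. Back-of-envelope:

```python
# KV-cache cost implied by the config above (bf16 = 2 bytes per element).
hidden_size, n_heads, n_kv_heads, n_layers = 3584, 28, 4, 28
head_dim = hidden_size // n_heads                          # 128
kv_bytes_per_token = 2 * n_kv_heads * head_dim * 2 * n_layers  # K and V
print(head_dim, kv_bytes_per_token)                        # 128, 57344 (= 56 KiB)
```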
    	
checkpoint-246/generation_config.json
ADDED
@@ -0,0 +1,14 @@
+{
+  "attn_implementation": "flash_attention_2",
+  "bos_token_id": 151643,
+  "do_sample": true,
+  "eos_token_id": [
+    151645,
+    151643
+  ],
+  "pad_token_id": 151643,
+  "repetition_penalty": 1.05,
+  "temperature": 1e-06,
+  "transformers_version": "4.50.0.dev0",
+  "use_cache": false
+}
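Note the decoding setup: do_sample is true but temperature is 1e-06, which collapses the sampling distribution onto the argmax, i.e. effectively greedy decoding with a mild repetition penalty. A tiny illustration:

```python
import torch

# softmax(logits / T) at T = 1e-06 puts essentially all mass on the argmax.
logits = torch.tensor([2.0, 1.0, 0.5])
probs = torch.softmax(logits / 1e-6, dim=-1)
print(probs)  # tensor([1., 0., 0.]) up to float rounding
```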
    	
checkpoint-246/merges.txt
ADDED
The diff for this file is too large to render. See raw diff.
    	
checkpoint-246/model-00001-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9abad11ad78a0bcd5c1bfc900798b63cfa1a94fd1d3c9239a2ee5bc930482a59
+size 4968243304

checkpoint-246/model-00002-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ab9034af032906afead0681d31933843ee36efc9e87a718d9506bb709193b292
+size 4991495816

checkpoint-246/model-00003-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e2e4f5017562b9f24b85e6789f98c14d75ffc8635d8240f54f90a6e9d1992e60
+size 4932751040

checkpoint-246/model-00004-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:96305554a0f6e3a37efbbb3c28a0012d2a9e68092a8232505beb7d6363e3b188
+size 1691924384
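The four shards total roughly 16.6 GB; at bf16 (2 bytes per parameter) that is about 8.3 B parameters, consistent with the _name_or_path above (Qwen2.5-VL-7B-Instruct, LLM plus vision tower). The small gap versus the index's total_size below is the per-file safetensors header:

```python
# Shard sizes from the LFS pointers above, in bytes.
sizes = [4_968_243_304, 4_991_495_816, 4_932_751_040, 1_691_924_384]
total = sum(sizes)                                  # 16_584_414_544
print(f"{total / 1e9:.2f} GB, ~{total / 2 / 1e9:.2f}B params at bf16")
# 16.58 GB, ~8.29B params (slightly above the index's total_size of
# 16_584_333_312, since each shard also carries its safetensors header)
```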
    	
checkpoint-246/model.safetensors.index.json
ADDED
@@ -0,0 +1,736 @@
+{
+  "metadata": {
+    "total_size": 16584333312
+  },
+  "weight_map": {
+    "lm_head.weight": "model-00004-of-00004.safetensors",
+    "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.16.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.16.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.16.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.16.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.17.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.17.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.17.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.17.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.17.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.17.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.17.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.17.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.17.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.17.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 163 | 
            +
                "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 164 | 
            +
                "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
         | 
| 165 | 
            +
                "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 166 | 
            +
                "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 167 | 
            +
                "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 168 | 
            +
                "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
         | 
| 169 | 
            +
                "model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 170 | 
            +
                "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 171 | 
            +
                "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 172 | 
            +
                "model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 173 | 
            +
                "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 174 | 
            +
                "model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 175 | 
            +
                "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 176 | 
            +
                "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
         | 
| 177 | 
            +
                "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 178 | 
            +
                "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 179 | 
            +
                "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 180 | 
            +
                "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
         | 
| 181 | 
            +
                "model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 182 | 
            +
                "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 183 | 
            +
                "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 184 | 
            +
                "model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 185 | 
            +
                "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 186 | 
            +
                "model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 187 | 
            +
                "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 188 | 
            +
                "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
         | 
| 189 | 
            +
                "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 190 | 
            +
                "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 191 | 
            +
                "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 192 | 
            +
                "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
         | 
| 193 | 
            +
                "model.layers.22.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 194 | 
            +
                "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 195 | 
            +
                "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 196 | 
            +
                "model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 197 | 
            +
                "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 198 | 
            +
                "model.layers.22.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 199 | 
            +
                "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 200 | 
            +
                "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
         | 
| 201 | 
            +
                "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 202 | 
            +
                "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 203 | 
            +
                "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 204 | 
            +
                "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
         | 
| 205 | 
            +
                "model.layers.23.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 206 | 
            +
                "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 207 | 
            +
                "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 208 | 
            +
                "model.layers.23.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 209 | 
            +
                "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 210 | 
            +
                "model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 211 | 
            +
                "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 212 | 
            +
                "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
         | 
| 213 | 
            +
                "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 214 | 
            +
                "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 215 | 
            +
                "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 216 | 
            +
                "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
         | 
| 217 | 
            +
                "model.layers.24.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 218 | 
            +
                "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 219 | 
            +
                "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 220 | 
            +
                "model.layers.24.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 221 | 
            +
                "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 222 | 
            +
                "model.layers.24.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 223 | 
            +
                "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 224 | 
            +
                "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
         | 
| 225 | 
            +
                "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 226 | 
            +
                "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 227 | 
            +
                "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 228 | 
            +
                "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
         | 
| 229 | 
            +
                "model.layers.25.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 230 | 
            +
                "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 231 | 
            +
                "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 232 | 
            +
                "model.layers.25.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 233 | 
            +
                "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 234 | 
            +
                "model.layers.25.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 235 | 
            +
                "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 236 | 
            +
                "model.layers.26.input_layernorm.weight": "model-00004-of-00004.safetensors",
         | 
| 237 | 
            +
                "model.layers.26.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
         | 
| 238 | 
            +
                "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 239 | 
            +
                "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 240 | 
            +
                "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
         | 
| 241 | 
            +
                "model.layers.26.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 242 | 
            +
                "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 243 | 
            +
                "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 244 | 
            +
                "model.layers.26.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 245 | 
            +
                "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 246 | 
            +
                "model.layers.26.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 247 | 
            +
                "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 248 | 
            +
                "model.layers.27.input_layernorm.weight": "model-00004-of-00004.safetensors",
         | 
| 249 | 
            +
                "model.layers.27.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
         | 
| 250 | 
            +
                "model.layers.27.mlp.gate_proj.weight": "model-00004-of-00004.safetensors",
         | 
| 251 | 
            +
                "model.layers.27.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
         | 
| 252 | 
            +
                "model.layers.27.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
         | 
| 253 | 
            +
                "model.layers.27.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         | 
| 254 | 
            +
                "model.layers.27.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         | 
| 255 | 
            +
                "model.layers.27.self_attn.o_proj.weight": "model-00004-of-00004.safetensors",
         | 
| 256 | 
            +
                "model.layers.27.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         | 
| 257 | 
            +
                "model.layers.27.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         | 
| 258 | 
            +
                "model.layers.27.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         | 
| 259 | 
            +
                "model.layers.27.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         | 
| 260 | 
            +
                "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
         | 
| 261 | 
            +
                "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 262 | 
            +
                "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 263 | 
            +
                "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 264 | 
            +
                "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
         | 
| 265 | 
            +
                "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 266 | 
            +
                "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 267 | 
            +
                "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 268 | 
            +
                "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 269 | 
            +
                "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 270 | 
            +
                "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 271 | 
            +
                "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 272 | 
            +
                "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
         | 
| 273 | 
            +
                "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 274 | 
            +
                "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 275 | 
            +
                "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 276 | 
            +
                "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
         | 
| 277 | 
            +
                "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 278 | 
            +
                "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 279 | 
            +
                "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 280 | 
            +
                "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 281 | 
            +
                "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 282 | 
            +
                "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 283 | 
            +
                "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 284 | 
            +
                "model.layers.5.input_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 285 | 
            +
                "model.layers.5.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 286 | 
            +
                "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 287 | 
            +
                "model.layers.5.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 288 | 
            +
                "model.layers.5.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 289 | 
            +
                "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 290 | 
            +
                "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 291 | 
            +
                "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 292 | 
            +
                "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 293 | 
            +
                "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 294 | 
            +
                "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 295 | 
            +
                "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 296 | 
            +
                "model.layers.6.input_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 297 | 
            +
                "model.layers.6.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 298 | 
            +
                "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 299 | 
            +
                "model.layers.6.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 300 | 
            +
                "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 301 | 
            +
                "model.layers.6.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 302 | 
            +
                "model.layers.6.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 303 | 
            +
                "model.layers.6.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 304 | 
            +
                "model.layers.6.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 305 | 
            +
                "model.layers.6.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 306 | 
            +
                "model.layers.6.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 307 | 
            +
                "model.layers.6.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 308 | 
            +
                "model.layers.7.input_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 309 | 
            +
                "model.layers.7.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 310 | 
            +
                "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 311 | 
            +
                "model.layers.7.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 312 | 
            +
                "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 313 | 
            +
                "model.layers.7.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 314 | 
            +
                "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 315 | 
            +
                "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 316 | 
            +
                "model.layers.7.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 317 | 
            +
                "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 318 | 
            +
                "model.layers.7.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 319 | 
            +
                "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 320 | 
            +
                "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 321 | 
            +
                "model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 322 | 
            +
                "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 323 | 
            +
                "model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 324 | 
            +
                "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 325 | 
            +
                "model.layers.8.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 326 | 
            +
                "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 327 | 
            +
                "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 328 | 
            +
                "model.layers.8.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 329 | 
            +
                "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 330 | 
            +
                "model.layers.8.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 331 | 
            +
                "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 332 | 
            +
                "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 333 | 
            +
                "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 334 | 
            +
                "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 335 | 
            +
                "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 336 | 
            +
                "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 337 | 
            +
                "model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 338 | 
            +
                "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 339 | 
            +
                "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 340 | 
            +
                "model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 341 | 
            +
                "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 342 | 
            +
                "model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 343 | 
            +
                "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 344 | 
            +
                "model.norm.weight": "model-00004-of-00004.safetensors",
         | 
| 345 | 
            +
                "visual.blocks.0.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 346 | 
            +
                "visual.blocks.0.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 347 | 
            +
                "visual.blocks.0.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 348 | 
            +
                "visual.blocks.0.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 349 | 
            +
                "visual.blocks.0.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 350 | 
            +
                "visual.blocks.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 351 | 
            +
                "visual.blocks.0.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 352 | 
            +
                "visual.blocks.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 353 | 
            +
                "visual.blocks.0.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 354 | 
            +
                "visual.blocks.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 355 | 
            +
                "visual.blocks.0.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 356 | 
            +
                "visual.blocks.0.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 357 | 
            +
                "visual.blocks.1.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 358 | 
            +
                "visual.blocks.1.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 359 | 
            +
                "visual.blocks.1.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 360 | 
            +
                "visual.blocks.1.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 361 | 
            +
                "visual.blocks.1.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 362 | 
            +
                "visual.blocks.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 363 | 
            +
                "visual.blocks.1.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 364 | 
            +
                "visual.blocks.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 365 | 
            +
                "visual.blocks.1.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 366 | 
            +
                "visual.blocks.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 367 | 
            +
                "visual.blocks.1.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 368 | 
            +
                "visual.blocks.1.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 369 | 
            +
                "visual.blocks.10.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 370 | 
            +
                "visual.blocks.10.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 371 | 
            +
                "visual.blocks.10.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 372 | 
            +
                "visual.blocks.10.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 373 | 
            +
                "visual.blocks.10.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 374 | 
            +
                "visual.blocks.10.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 375 | 
            +
                "visual.blocks.10.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 376 | 
            +
                "visual.blocks.10.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 377 | 
            +
                "visual.blocks.10.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 378 | 
            +
                "visual.blocks.10.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 379 | 
            +
                "visual.blocks.10.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 380 | 
            +
                "visual.blocks.10.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 381 | 
            +
                "visual.blocks.11.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 382 | 
            +
                "visual.blocks.11.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 383 | 
            +
                "visual.blocks.11.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 384 | 
            +
                "visual.blocks.11.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 385 | 
            +
                "visual.blocks.11.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 386 | 
            +
                "visual.blocks.11.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 387 | 
            +
                "visual.blocks.11.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 388 | 
            +
                "visual.blocks.11.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 389 | 
            +
                "visual.blocks.11.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 390 | 
            +
                "visual.blocks.11.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 391 | 
            +
                "visual.blocks.11.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 392 | 
            +
                "visual.blocks.11.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 393 | 
            +
                "visual.blocks.12.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 394 | 
            +
                "visual.blocks.12.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 395 | 
            +
                "visual.blocks.12.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 396 | 
            +
                "visual.blocks.12.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 397 | 
            +
                "visual.blocks.12.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 398 | 
            +
                "visual.blocks.12.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 399 | 
            +
                "visual.blocks.12.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 400 | 
            +
                "visual.blocks.12.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 401 | 
            +
                "visual.blocks.12.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 402 | 
            +
                "visual.blocks.12.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 403 | 
            +
                "visual.blocks.12.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 404 | 
            +
                "visual.blocks.12.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 405 | 
            +
                "visual.blocks.13.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 406 | 
            +
                "visual.blocks.13.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 407 | 
            +
                "visual.blocks.13.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 408 | 
            +
                "visual.blocks.13.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 409 | 
            +
                "visual.blocks.13.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 410 | 
            +
                "visual.blocks.13.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 411 | 
            +
                "visual.blocks.13.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 412 | 
            +
                "visual.blocks.13.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 413 | 
            +
                "visual.blocks.13.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 414 | 
            +
                "visual.blocks.13.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 415 | 
            +
                "visual.blocks.13.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 416 | 
            +
                "visual.blocks.13.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 417 | 
            +
                "visual.blocks.14.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 418 | 
            +
                "visual.blocks.14.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 419 | 
            +
                "visual.blocks.14.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 420 | 
            +
                "visual.blocks.14.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 421 | 
            +
                "visual.blocks.14.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 422 | 
            +
                "visual.blocks.14.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 423 | 
            +
                "visual.blocks.14.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 424 | 
            +
                "visual.blocks.14.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 425 | 
            +
                "visual.blocks.14.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 426 | 
            +
                "visual.blocks.14.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 427 | 
            +
                "visual.blocks.14.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 428 | 
            +
                "visual.blocks.14.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 429 | 
            +
                "visual.blocks.15.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 430 | 
            +
                "visual.blocks.15.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 431 | 
            +
                "visual.blocks.15.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 432 | 
            +
                "visual.blocks.15.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 433 | 
            +
                "visual.blocks.15.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 434 | 
            +
                "visual.blocks.15.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 435 | 
            +
                "visual.blocks.15.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 436 | 
            +
                "visual.blocks.15.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 437 | 
            +
                "visual.blocks.15.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 438 | 
            +
                "visual.blocks.15.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 439 | 
            +
                "visual.blocks.15.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 440 | 
            +
                "visual.blocks.15.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 441 | 
            +
                "visual.blocks.16.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 442 | 
            +
                "visual.blocks.16.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 443 | 
            +
                "visual.blocks.16.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 444 | 
            +
                "visual.blocks.16.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 445 | 
            +
                "visual.blocks.16.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 446 | 
            +
                "visual.blocks.16.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 447 | 
            +
                "visual.blocks.16.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 448 | 
            +
                "visual.blocks.16.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 449 | 
            +
                "visual.blocks.16.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 450 | 
            +
                "visual.blocks.16.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 451 | 
            +
                "visual.blocks.16.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 452 | 
            +
                "visual.blocks.16.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 453 | 
            +
                "visual.blocks.17.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 454 | 
            +
                "visual.blocks.17.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 455 | 
            +
                "visual.blocks.17.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 456 | 
            +
                "visual.blocks.17.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 457 | 
            +
                "visual.blocks.17.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 458 | 
            +
                "visual.blocks.17.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 459 | 
            +
                "visual.blocks.17.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 460 | 
            +
                "visual.blocks.17.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 461 | 
            +
                "visual.blocks.17.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 462 | 
            +
                "visual.blocks.17.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 463 | 
            +
                "visual.blocks.17.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 464 | 
            +
                "visual.blocks.17.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 465 | 
            +
                "visual.blocks.18.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 466 | 
            +
                "visual.blocks.18.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 467 | 
            +
                "visual.blocks.18.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 468 | 
            +
                "visual.blocks.18.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 469 | 
            +
                "visual.blocks.18.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 470 | 
            +
                "visual.blocks.18.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 471 | 
            +
                "visual.blocks.18.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 472 | 
            +
                "visual.blocks.18.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 473 | 
            +
                "visual.blocks.18.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 474 | 
            +
                "visual.blocks.18.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 475 | 
            +
                "visual.blocks.18.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 476 | 
            +
                "visual.blocks.18.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 477 | 
            +
                "visual.blocks.19.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 478 | 
            +
                "visual.blocks.19.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 479 | 
            +
                "visual.blocks.19.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 480 | 
            +
                "visual.blocks.19.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 481 | 
            +
                "visual.blocks.19.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 482 | 
            +
                "visual.blocks.19.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 483 | 
            +
                "visual.blocks.19.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 484 | 
            +
                "visual.blocks.19.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 485 | 
            +
                "visual.blocks.19.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 486 | 
            +
                "visual.blocks.19.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 487 | 
            +
                "visual.blocks.19.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 488 | 
            +
                "visual.blocks.19.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 489 | 
            +
                "visual.blocks.2.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 490 | 
            +
                "visual.blocks.2.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 491 | 
            +
                "visual.blocks.2.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 492 | 
            +
                "visual.blocks.2.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 493 | 
            +
                "visual.blocks.2.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 494 | 
            +
                "visual.blocks.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 495 | 
            +
                "visual.blocks.2.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 496 | 
            +
                "visual.blocks.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 497 | 
            +
                "visual.blocks.2.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 498 | 
            +
                "visual.blocks.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 499 | 
            +
                "visual.blocks.2.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 500 | 
            +
                "visual.blocks.2.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 501 | 
            +
                "visual.blocks.20.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 502 | 
            +
                "visual.blocks.20.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 503 | 
            +
                "visual.blocks.20.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 504 | 
            +
                "visual.blocks.20.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 505 | 
            +
                "visual.blocks.20.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 506 | 
            +
                "visual.blocks.20.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 507 | 
            +
                "visual.blocks.20.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 508 | 
            +
                "visual.blocks.20.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 509 | 
            +
                "visual.blocks.20.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 510 | 
            +
                "visual.blocks.20.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 511 | 
            +
                "visual.blocks.20.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 512 | 
            +
                "visual.blocks.20.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 513 | 
            +
                "visual.blocks.21.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 514 | 
            +
                "visual.blocks.21.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 515 | 
            +
                "visual.blocks.21.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 516 | 
            +
                "visual.blocks.21.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 517 | 
            +
                "visual.blocks.21.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 518 | 
            +
                "visual.blocks.21.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 519 | 
            +
                "visual.blocks.21.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 520 | 
            +
                "visual.blocks.21.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 521 | 
            +
                "visual.blocks.21.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 522 | 
            +
                "visual.blocks.21.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 523 | 
            +
                "visual.blocks.21.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 524 | 
            +
                "visual.blocks.21.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 525 | 
            +
                "visual.blocks.22.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 526 | 
            +
                "visual.blocks.22.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 527 | 
            +
                "visual.blocks.22.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 528 | 
            +
                "visual.blocks.22.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 529 | 
            +
                "visual.blocks.22.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 530 | 
            +
                "visual.blocks.22.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 531 | 
            +
                "visual.blocks.22.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 532 | 
            +
                "visual.blocks.22.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 533 | 
            +
                "visual.blocks.22.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 534 | 
            +
                "visual.blocks.22.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 535 | 
            +
                "visual.blocks.22.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 536 | 
            +
                "visual.blocks.22.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 537 | 
            +
                "visual.blocks.23.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 538 | 
            +
                "visual.blocks.23.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 539 | 
            +
                "visual.blocks.23.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 540 | 
            +
                "visual.blocks.23.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 541 | 
            +
                "visual.blocks.23.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 542 | 
            +
                "visual.blocks.23.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 543 | 
            +
                "visual.blocks.23.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 544 | 
            +
                "visual.blocks.23.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 545 | 
            +
                "visual.blocks.23.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 546 | 
            +
                "visual.blocks.23.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 547 | 
            +
                "visual.blocks.23.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 548 | 
            +
                "visual.blocks.23.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 549 | 
            +
                "visual.blocks.24.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 550 | 
            +
                "visual.blocks.24.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 551 | 
            +
                "visual.blocks.24.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 552 | 
            +
                "visual.blocks.24.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 553 | 
            +
                "visual.blocks.24.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 554 | 
            +
                "visual.blocks.24.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 555 | 
            +
                "visual.blocks.24.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 556 | 
            +
                "visual.blocks.24.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 557 | 
            +
                "visual.blocks.24.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 558 | 
            +
                "visual.blocks.24.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 559 | 
            +
                "visual.blocks.24.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 560 | 
            +
                "visual.blocks.24.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 561 | 
            +
                "visual.blocks.25.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 562 | 
            +
                "visual.blocks.25.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 563 | 
            +
                "visual.blocks.25.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 564 | 
            +
                "visual.blocks.25.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 565 | 
            +
                "visual.blocks.25.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 566 | 
            +
                "visual.blocks.25.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 567 | 
            +
                "visual.blocks.25.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 568 | 
            +
                "visual.blocks.25.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 569 | 
            +
                "visual.blocks.25.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 570 | 
            +
                "visual.blocks.25.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 571 | 
            +
                "visual.blocks.25.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 572 | 
            +
                "visual.blocks.25.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 573 | 
            +
                "visual.blocks.26.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 574 | 
            +
                "visual.blocks.26.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 575 | 
            +
                "visual.blocks.26.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 576 | 
            +
                "visual.blocks.26.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 577 | 
            +
                "visual.blocks.26.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 578 | 
            +
                "visual.blocks.26.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 579 | 
            +
                "visual.blocks.26.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 580 | 
            +
                "visual.blocks.26.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 581 | 
            +
                "visual.blocks.26.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 582 | 
            +
                "visual.blocks.26.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 583 | 
            +
                "visual.blocks.26.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 584 | 
            +
                "visual.blocks.26.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 585 | 
            +
                "visual.blocks.27.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 586 | 
            +
                "visual.blocks.27.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 587 | 
            +
                "visual.blocks.27.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 588 | 
            +
                "visual.blocks.27.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 589 | 
            +
                "visual.blocks.27.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 590 | 
            +
                "visual.blocks.27.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 591 | 
            +
                "visual.blocks.27.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 592 | 
            +
                "visual.blocks.27.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 593 | 
            +
                "visual.blocks.27.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 594 | 
            +
                "visual.blocks.27.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 595 | 
            +
                "visual.blocks.27.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 596 | 
            +
                "visual.blocks.27.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 597 | 
            +
                "visual.blocks.28.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 598 | 
            +
                "visual.blocks.28.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 599 | 
            +
                "visual.blocks.28.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 600 | 
            +
                "visual.blocks.28.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 601 | 
            +
                "visual.blocks.28.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 602 | 
            +
                "visual.blocks.28.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 603 | 
            +
                "visual.blocks.28.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 604 | 
            +
                "visual.blocks.28.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 605 | 
            +
                "visual.blocks.28.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 606 | 
            +
                "visual.blocks.28.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 607 | 
            +
                "visual.blocks.28.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 608 | 
            +
                "visual.blocks.28.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 609 | 
            +
                "visual.blocks.29.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 610 | 
            +
                "visual.blocks.29.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 611 | 
            +
                "visual.blocks.29.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 612 | 
            +
                "visual.blocks.29.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 613 | 
            +
                "visual.blocks.29.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 614 | 
            +
                "visual.blocks.29.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 615 | 
            +
                "visual.blocks.29.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 616 | 
            +
                "visual.blocks.29.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 617 | 
            +
                "visual.blocks.29.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 618 | 
            +
                "visual.blocks.29.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 619 | 
            +
                "visual.blocks.29.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 620 | 
            +
                "visual.blocks.29.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 621 | 
            +
                "visual.blocks.3.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 622 | 
            +
                "visual.blocks.3.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 623 | 
            +
                "visual.blocks.3.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 624 | 
            +
                "visual.blocks.3.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 625 | 
            +
                "visual.blocks.3.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 626 | 
            +
                "visual.blocks.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 627 | 
            +
                "visual.blocks.3.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 628 | 
            +
                "visual.blocks.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 629 | 
            +
                "visual.blocks.3.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 630 | 
            +
                "visual.blocks.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 631 | 
            +
                "visual.blocks.3.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 632 | 
            +
                "visual.blocks.3.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 633 | 
            +
                "visual.blocks.30.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 634 | 
            +
                "visual.blocks.30.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 635 | 
            +
                "visual.blocks.30.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 636 | 
            +
                "visual.blocks.30.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 637 | 
            +
                "visual.blocks.30.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 638 | 
            +
                "visual.blocks.30.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 639 | 
            +
                "visual.blocks.30.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 640 | 
            +
                "visual.blocks.30.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 641 | 
            +
                "visual.blocks.30.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 642 | 
            +
                "visual.blocks.30.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 643 | 
            +
                "visual.blocks.30.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 644 | 
            +
                "visual.blocks.30.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 645 | 
            +
                "visual.blocks.31.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 646 | 
            +
                "visual.blocks.31.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 647 | 
            +
                "visual.blocks.31.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 648 | 
            +
                "visual.blocks.31.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 649 | 
            +
                "visual.blocks.31.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 650 | 
            +
                "visual.blocks.31.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 651 | 
            +
                "visual.blocks.31.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 652 | 
            +
                "visual.blocks.31.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 653 | 
            +
                "visual.blocks.31.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 654 | 
            +
                "visual.blocks.31.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 655 | 
            +
                "visual.blocks.31.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 656 | 
            +
                "visual.blocks.31.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 657 | 
            +
                "visual.blocks.4.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 658 | 
            +
                "visual.blocks.4.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 659 | 
            +
                "visual.blocks.4.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 660 | 
            +
                "visual.blocks.4.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 661 | 
            +
                "visual.blocks.4.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 662 | 
            +
                "visual.blocks.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 663 | 
            +
                "visual.blocks.4.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 664 | 
            +
                "visual.blocks.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 665 | 
            +
                "visual.blocks.4.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 666 | 
            +
                "visual.blocks.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 667 | 
            +
                "visual.blocks.4.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 668 | 
            +
                "visual.blocks.4.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 669 | 
            +
                "visual.blocks.5.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 670 | 
            +
                "visual.blocks.5.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 671 | 
            +
                "visual.blocks.5.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 672 | 
            +
                "visual.blocks.5.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 673 | 
            +
                "visual.blocks.5.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 674 | 
            +
                "visual.blocks.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 675 | 
            +
                "visual.blocks.5.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 676 | 
            +
                "visual.blocks.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 677 | 
            +
                "visual.blocks.5.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 678 | 
            +
                "visual.blocks.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 679 | 
            +
                "visual.blocks.5.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 680 | 
            +
                "visual.blocks.5.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 681 | 
            +
                "visual.blocks.6.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 682 | 
            +
                "visual.blocks.6.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 683 | 
            +
                "visual.blocks.6.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 684 | 
            +
                "visual.blocks.6.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 685 | 
            +
                "visual.blocks.6.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 686 | 
            +
                "visual.blocks.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 687 | 
            +
                "visual.blocks.6.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 688 | 
            +
                "visual.blocks.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 689 | 
            +
                "visual.blocks.6.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 690 | 
            +
                "visual.blocks.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 691 | 
            +
                "visual.blocks.6.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 692 | 
            +
                "visual.blocks.6.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 693 | 
            +
                "visual.blocks.7.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 694 | 
            +
                "visual.blocks.7.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 695 | 
            +
                "visual.blocks.7.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 696 | 
            +
                "visual.blocks.7.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 697 | 
            +
                "visual.blocks.7.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 698 | 
            +
                "visual.blocks.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 699 | 
            +
                "visual.blocks.7.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 700 | 
            +
                "visual.blocks.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 701 | 
            +
                "visual.blocks.7.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 702 | 
            +
                "visual.blocks.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 703 | 
            +
                "visual.blocks.7.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 704 | 
            +
                "visual.blocks.7.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 705 | 
            +
                "visual.blocks.8.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 706 | 
            +
                "visual.blocks.8.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 707 | 
            +
                "visual.blocks.8.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 708 | 
            +
                "visual.blocks.8.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 709 | 
            +
                "visual.blocks.8.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 710 | 
            +
                "visual.blocks.8.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 711 | 
            +
                "visual.blocks.8.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 712 | 
            +
                "visual.blocks.8.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 713 | 
            +
                "visual.blocks.8.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 714 | 
            +
                "visual.blocks.8.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 715 | 
            +
                "visual.blocks.8.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 716 | 
            +
                "visual.blocks.8.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 717 | 
            +
                "visual.blocks.9.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 718 | 
            +
                "visual.blocks.9.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 719 | 
            +
                "visual.blocks.9.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 720 | 
            +
                "visual.blocks.9.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 721 | 
            +
                "visual.blocks.9.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 722 | 
            +
                "visual.blocks.9.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 723 | 
            +
                "visual.blocks.9.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 724 | 
            +
                "visual.blocks.9.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 725 | 
            +
                "visual.blocks.9.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 726 | 
            +
                "visual.blocks.9.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 727 | 
            +
                "visual.blocks.9.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 728 | 
            +
                "visual.blocks.9.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 729 | 
            +
                "visual.merger.ln_q.weight": "model-00001-of-00004.safetensors",
         | 
| 730 | 
            +
                "visual.merger.mlp.0.bias": "model-00001-of-00004.safetensors",
         | 
| 731 | 
            +
                "visual.merger.mlp.0.weight": "model-00001-of-00004.safetensors",
         | 
| 732 | 
            +
                "visual.merger.mlp.2.bias": "model-00001-of-00004.safetensors",
         | 
| 733 | 
            +
                "visual.merger.mlp.2.weight": "model-00001-of-00004.safetensors",
         | 
| 734 | 
            +
                "visual.patch_embed.proj.weight": "model-00001-of-00004.safetensors"
         | 
| 735 | 
            +
              }
         | 
| 736 | 
            +
            }
         | 
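
The weight_map above routes every `visual.*` tensor in this tail of the index to the first of the four shards. A minimal sketch of resolving a tensor through such an index, assuming the checkpoint directory has been downloaded locally (the directory name and tensor key below are illustrative):

```python
# Look up which shard holds a tensor, then load only that tensor.
import json
from safetensors import safe_open

ckpt_dir = "checkpoint-246"  # hypothetical local path
with open(f"{ckpt_dir}/model.safetensors.index.json") as f:
    index = json.load(f)

key = "visual.patch_embed.proj.weight"
shard = index["weight_map"][key]  # e.g. "model-00001-of-00004.safetensors"
with safe_open(f"{ckpt_dir}/{shard}", framework="pt", device="cpu") as f:
    tensor = f.get_tensor(key)
print(key, tuple(tensor.shape))
```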
    	
        checkpoint-246/preprocessor_config.json
    ADDED
    
@@ -0,0 +1,29 @@
{
  "do_convert_rgb": true,
  "do_normalize": true,
  "do_rescale": true,
  "do_resize": true,
  "image_mean": [
    0.48145466,
    0.4578275,
    0.40821073
  ],
  "image_processor_type": "Qwen2VLImageProcessor",
  "image_std": [
    0.26862954,
    0.26130258,
    0.27577711
  ],
  "max_pixels": 4014080,
  "merge_size": 2,
  "min_pixels": 3136,
  "patch_size": 14,
  "processor_class": "Qwen2_5_VLProcessor",
  "resample": 3,
  "rescale_factor": 0.00392156862745098,
  "size": {
    "longest_edge": 12845056,
    "shortest_edge": 3136
  },
  "temporal_patch_size": 2
}
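
These values bound Qwen2-VL-style image preprocessing: side lengths are aligned to patch_size * merge_size = 28 pixels and the total pixel count is clamped to [min_pixels, max_pixels]. The sketch below is a rough approximation of that resize rule, not the exact processor implementation, shown only to make the relationship between these fields and the resulting vision-token count concrete:

```python
# Approximate "smart resize" and the resulting vision-token count.
import math

patch_size, merge_size = 14, 2
min_pixels, max_pixels = 3136, 4014080
factor = patch_size * merge_size  # 28

def smart_resize(h: int, w: int) -> tuple[int, int]:
    # Round each side to a multiple of 28, then rescale so the
    # pixel count lands inside [min_pixels, max_pixels].
    h2 = max(factor, round(h / factor) * factor)
    w2 = max(factor, round(w / factor) * factor)
    if h2 * w2 > max_pixels:
        scale = math.sqrt(h2 * w2 / max_pixels)
        h2 = math.floor(h / scale / factor) * factor
        w2 = math.floor(w / scale / factor) * factor
    elif h2 * w2 < min_pixels:
        scale = math.sqrt(min_pixels / (h2 * w2))
        h2 = math.ceil(h * scale / factor) * factor
        w2 = math.ceil(w * scale / factor) * factor
    return h2, w2

h, w = smart_resize(1080, 1920)
vision_tokens = (h // factor) * (w // factor)  # tokens after 2x2 patch merging
print(h, w, vision_tokens)
```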
    	
        checkpoint-246/special_tokens_map.json
    ADDED
    
@@ -0,0 +1,31 @@
{
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>",
    "<|object_ref_start|>",
    "<|object_ref_end|>",
    "<|box_start|>",
    "<|box_end|>",
    "<|quad_start|>",
    "<|quad_end|>",
    "<|vision_start|>",
    "<|vision_end|>",
    "<|vision_pad|>",
    "<|image_pad|>",
    "<|video_pad|>"
  ],
  "eos_token": {
    "content": "<|im_end|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
    	
        checkpoint-246/tokenizer.json
    ADDED
    
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
size 11421896
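
tokenizer.json is stored as a Git LFS pointer, so only the sha256 oid and the byte size live in the repository; the 11 MB payload is fetched separately. A small sketch verifying a downloaded copy against the pointer, assuming the file has already been fetched locally (path is illustrative):

```python
# Check a downloaded LFS file against the pointer's sha256 oid.
import hashlib

expected = "9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa"
with open("checkpoint-246/tokenizer.json", "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()
assert digest == expected, "tokenizer.json does not match its LFS pointer"
```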
    	
        checkpoint-246/tokenizer_config.json
    ADDED
    
@@ -0,0 +1,209 @@
{
  "add_bos_token": false,
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "151643": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151644": {
      "content": "<|im_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151645": {
      "content": "<|im_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151646": {
      "content": "<|object_ref_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151647": {
      "content": "<|object_ref_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151648": {
      "content": "<|box_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151649": {
      "content": "<|box_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151650": {
      "content": "<|quad_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151651": {
      "content": "<|quad_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151652": {
      "content": "<|vision_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151653": {
      "content": "<|vision_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151654": {
      "content": "<|vision_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151655": {
      "content": "<|image_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151656": {
      "content": "<|video_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151657": {
      "content": "<tool_call>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151658": {
      "content": "</tool_call>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151659": {
      "content": "<|fim_prefix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151660": {
      "content": "<|fim_middle|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151661": {
      "content": "<|fim_suffix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151662": {
      "content": "<|fim_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151663": {
      "content": "<|repo_name|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151664": {
      "content": "<|file_sep|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    }
  },
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>",
    "<|object_ref_start|>",
    "<|object_ref_end|>",
    "<|box_start|>",
    "<|box_end|>",
    "<|quad_start|>",
    "<|quad_end|>",
    "<|vision_start|>",
    "<|vision_end|>",
    "<|vision_pad|>",
    "<|image_pad|>",
    "<|video_pad|>"
  ],
  "bos_token": null,
  "chat_template": "{%- if tools %}\n    {{- '<|im_start|>system\\n' }}\n    {%- if messages[0]['role'] == 'system' %}\n        {{- messages[0]['content'] }}\n    {%- else %}\n        {{- 'You are a helpful assistant.' }}\n    {%- endif %}\n    {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n    {%- for tool in tools %}\n        {{- \"\\n\" }}\n        {{- tool | tojson }}\n    {%- endfor %}\n    {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n    {%- if messages[0]['role'] == 'system' %}\n        {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n    {%- else %}\n        {{- '<|im_start|>system\\nYou are a helpful assistant.<|im_end|>\\n' }}\n    {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n    {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n        {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n    {%- elif message.role == \"assistant\" %}\n        {{- '<|im_start|>' + message.role }}\n        {%- if message.content %}\n            {{- '\\n' + message.content }}\n        {%- endif %}\n        {%- for tool_call in message.tool_calls %}\n            {%- if tool_call.function is defined %}\n                {%- set tool_call = tool_call.function %}\n            {%- endif %}\n            {{- '\\n<tool_call>\\n{\"name\": \"' }}\n            {{- tool_call.name }}\n            {{- '\", \"arguments\": ' }}\n            {{- tool_call.arguments | tojson }}\n            {{- '}\\n</tool_call>' }}\n        {%- endfor %}\n        {{- '<|im_end|>\\n' }}\n    {%- elif message.role == \"tool\" %}\n        {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n            {{- '<|im_start|>user' }}\n        {%- endif %}\n        {{- '\\n<tool_response>\\n' }}\n        {{- message.content }}\n        {{- '\\n</tool_response>' }}\n        {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n            {{- '<|im_end|>\\n' }}\n        {%- endif %}\n    {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n    {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|im_end|>",
  "errors": "replace",
  "extra_special_tokens": {},
  "model_max_length": 131072,
  "pad_token": "<|endoftext|>",
  "processor_class": "Qwen2_5_VLProcessor",
  "split_special_tokens": false,
  "tokenizer_class": "Qwen2Tokenizer",
  "unk_token": null
}
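
The chat_template above renders ChatML-style turns delimited by <|im_start|>/<|im_end|>, with optional <tools> and <tool_call> sections. A short sketch of applying it through the saved tokenizer, assuming the checkpoint directory is available locally (the path is illustrative):

```python
# Render a prompt with the checkpoint's chat template.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("checkpoint-246")
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Describe the image."},
]
prompt = tok.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)  # ends with "<|im_start|>assistant\n" for generation
```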
    	
        checkpoint-246/trainer_state.json
    ADDED
    
@@ -0,0 +1,1755 @@
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 1.0,
  "global_step": 246,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004073319755600814,
      "grad_norm": 34.300819396972656,
      "learning_rate": 8.130081300813008e-09,
      "loss": 1.59619802236557,
      "step": 1
    },
    {
      "epoch": 0.008146639511201629,
      "grad_norm": 30.720197677612305,
      "learning_rate": 1.6260162601626016e-08,
      "loss": 1.468272864818573,
      "step": 2
    },
    {
      "epoch": 0.012219959266802444,
      "grad_norm": 30.16754722595215,
      "learning_rate": 2.4390243902439023e-08,
      "loss": 1.3843095302581787,
      "step": 3
    },
    {
      "epoch": 0.016293279022403257,
      "grad_norm": 38.58047103881836,
      "learning_rate": 3.252032520325203e-08,
      "loss": 1.7031245231628418,
      "step": 4
    },
    {
      "epoch": 0.020366598778004074,
      "grad_norm": 30.89760971069336,
      "learning_rate": 4.065040650406504e-08,
      "loss": 1.4844104647636414,
      "step": 5
    },
    {
      "epoch": 0.024439918533604887,
      "grad_norm": 34.434993743896484,
      "learning_rate": 4.878048780487805e-08,
      "loss": 1.574910283088684,
      "step": 6
    },
    {
      "epoch": 0.028513238289205704,
      "grad_norm": 32.540470123291016,
      "learning_rate": 5.6910569105691055e-08,
      "loss": 1.4606674909591675,
      "step": 7
    },
    {
      "epoch": 0.032586558044806514,
      "grad_norm": 36.41299819946289,
      "learning_rate": 6.504065040650406e-08,
      "loss": 1.553576111793518,
      "step": 8
    },
    {
      "epoch": 0.03665987780040733,
      "grad_norm": 34.50511932373047,
      "learning_rate": 7.317073170731706e-08,
      "loss": 1.3344553709030151,
      "step": 9
    },
    {
      "epoch": 0.04073319755600815,
      "grad_norm": 27.898704528808594,
      "learning_rate": 8.130081300813008e-08,
      "loss": 1.3406395316123962,
      "step": 10
    },
    {
      "epoch": 0.04480651731160896,
      "grad_norm": 29.29271125793457,
      "learning_rate": 8.943089430894309e-08,
      "loss": 1.4415303468704224,
            +
                  "step": 11
         | 
| 87 | 
            +
                },
         | 
| 88 | 
            +
                {
         | 
| 89 | 
            +
                  "epoch": 0.048879837067209775,
         | 
| 90 | 
            +
                  "grad_norm": 28.2354736328125,
         | 
| 91 | 
            +
                  "learning_rate": 9.75609756097561e-08,
         | 
| 92 | 
            +
                  "loss": 1.2696096301078796,
         | 
| 93 | 
            +
                  "step": 12
         | 
| 94 | 
            +
                },
         | 
| 95 | 
            +
                {
         | 
| 96 | 
            +
                  "epoch": 0.05295315682281059,
         | 
| 97 | 
            +
                  "grad_norm": 35.44163131713867,
         | 
| 98 | 
            +
                  "learning_rate": 1.0569105691056911e-07,
         | 
| 99 | 
            +
                  "loss": 1.598312497138977,
         | 
| 100 | 
            +
                  "step": 13
         | 
| 101 | 
            +
                },
         | 
| 102 | 
            +
                {
         | 
| 103 | 
            +
                  "epoch": 0.05702647657841141,
         | 
| 104 | 
            +
                  "grad_norm": 26.94402313232422,
         | 
| 105 | 
            +
                  "learning_rate": 1.1382113821138211e-07,
         | 
| 106 | 
            +
                  "loss": 1.3497812747955322,
         | 
| 107 | 
            +
                  "step": 14
         | 
| 108 | 
            +
                },
         | 
| 109 | 
            +
                {
         | 
| 110 | 
            +
                  "epoch": 0.06109979633401222,
         | 
| 111 | 
            +
                  "grad_norm": 37.78248977661133,
         | 
| 112 | 
            +
                  "learning_rate": 1.219512195121951e-07,
         | 
| 113 | 
            +
                  "loss": 1.5689660906791687,
         | 
| 114 | 
            +
                  "step": 15
         | 
| 115 | 
            +
                },
         | 
| 116 | 
            +
                {
         | 
| 117 | 
            +
                  "epoch": 0.06517311608961303,
         | 
| 118 | 
            +
                  "grad_norm": 31.73078155517578,
         | 
| 119 | 
            +
                  "learning_rate": 1.3008130081300813e-07,
         | 
| 120 | 
            +
                  "loss": 1.525648295879364,
         | 
| 121 | 
            +
                  "step": 16
         | 
| 122 | 
            +
                },
         | 
| 123 | 
            +
                {
         | 
| 124 | 
            +
                  "epoch": 0.06924643584521385,
         | 
| 125 | 
            +
                  "grad_norm": 27.77250862121582,
         | 
| 126 | 
            +
                  "learning_rate": 1.3821138211382114e-07,
         | 
| 127 | 
            +
                  "loss": 1.304672360420227,
         | 
| 128 | 
            +
                  "step": 17
         | 
| 129 | 
            +
                },
         | 
| 130 | 
            +
                {
         | 
| 131 | 
            +
                  "epoch": 0.07331975560081466,
         | 
| 132 | 
            +
                  "grad_norm": 28.092498779296875,
         | 
| 133 | 
            +
                  "learning_rate": 1.4634146341463413e-07,
         | 
| 134 | 
            +
                  "loss": 1.346445381641388,
         | 
| 135 | 
            +
                  "step": 18
         | 
| 136 | 
            +
                },
         | 
| 137 | 
            +
                {
         | 
| 138 | 
            +
                  "epoch": 0.07739307535641547,
         | 
| 139 | 
            +
                  "grad_norm": 30.995866775512695,
         | 
| 140 | 
            +
                  "learning_rate": 1.5447154471544717e-07,
         | 
| 141 | 
            +
                  "loss": 1.447025179862976,
         | 
| 142 | 
            +
                  "step": 19
         | 
| 143 | 
            +
                },
         | 
| 144 | 
            +
                {
         | 
| 145 | 
            +
                  "epoch": 0.0814663951120163,
         | 
| 146 | 
            +
                  "grad_norm": 28.858421325683594,
         | 
| 147 | 
            +
                  "learning_rate": 1.6260162601626016e-07,
         | 
| 148 | 
            +
                  "loss": 1.3801668882369995,
         | 
| 149 | 
            +
                  "step": 20
         | 
| 150 | 
            +
                },
         | 
| 151 | 
            +
                {
         | 
| 152 | 
            +
                  "epoch": 0.0855397148676171,
         | 
| 153 | 
            +
                  "grad_norm": 31.91228485107422,
         | 
| 154 | 
            +
                  "learning_rate": 1.7073170731707317e-07,
         | 
| 155 | 
            +
                  "loss": 1.4577875137329102,
         | 
| 156 | 
            +
                  "step": 21
         | 
| 157 | 
            +
                },
         | 
| 158 | 
            +
                {
         | 
| 159 | 
            +
                  "epoch": 0.08961303462321792,
         | 
| 160 | 
            +
                  "grad_norm": 31.215259552001953,
         | 
| 161 | 
            +
                  "learning_rate": 1.7886178861788619e-07,
         | 
| 162 | 
            +
                  "loss": 1.4091373682022095,
         | 
| 163 | 
            +
                  "step": 22
         | 
| 164 | 
            +
                },
         | 
| 165 | 
            +
                {
         | 
| 166 | 
            +
                  "epoch": 0.09368635437881874,
         | 
| 167 | 
            +
                  "grad_norm": 30.24734115600586,
         | 
| 168 | 
            +
                  "learning_rate": 1.8699186991869917e-07,
         | 
| 169 | 
            +
                  "loss": 1.4649581909179688,
         | 
| 170 | 
            +
                  "step": 23
         | 
| 171 | 
            +
                },
         | 
| 172 | 
            +
                {
         | 
| 173 | 
            +
                  "epoch": 0.09775967413441955,
         | 
| 174 | 
            +
                  "grad_norm": 31.560291290283203,
         | 
| 175 | 
            +
                  "learning_rate": 1.951219512195122e-07,
         | 
| 176 | 
            +
                  "loss": 1.5308585166931152,
         | 
| 177 | 
            +
                  "step": 24
         | 
| 178 | 
            +
                },
         | 
| 179 | 
            +
                {
         | 
| 180 | 
            +
                  "epoch": 0.10183299389002037,
         | 
| 181 | 
            +
                  "grad_norm": 27.27391242980957,
         | 
| 182 | 
            +
                  "learning_rate": 2.032520325203252e-07,
         | 
| 183 | 
            +
                  "loss": 1.5144553780555725,
         | 
| 184 | 
            +
                  "step": 25
         | 
| 185 | 
            +
                },
         | 
| 186 | 
            +
                {
         | 
| 187 | 
            +
                  "epoch": 0.10590631364562118,
         | 
| 188 | 
            +
                  "grad_norm": 29.813785552978516,
         | 
| 189 | 
            +
                  "learning_rate": 2.1138211382113822e-07,
         | 
| 190 | 
            +
                  "loss": 1.519466757774353,
         | 
| 191 | 
            +
                  "step": 26
         | 
| 192 | 
            +
                },
         | 
| 193 | 
            +
                {
         | 
| 194 | 
            +
                  "epoch": 0.109979633401222,
         | 
| 195 | 
            +
                  "grad_norm": 24.201751708984375,
         | 
| 196 | 
            +
                  "learning_rate": 2.195121951219512e-07,
         | 
| 197 | 
            +
                  "loss": 1.3116011023521423,
         | 
| 198 | 
            +
                  "step": 27
         | 
| 199 | 
            +
                },
         | 
| 200 | 
            +
                {
         | 
| 201 | 
            +
                  "epoch": 0.11405295315682282,
         | 
| 202 | 
            +
                  "grad_norm": 27.95865249633789,
         | 
| 203 | 
            +
                  "learning_rate": 2.2764227642276422e-07,
         | 
| 204 | 
            +
                  "loss": 1.4637184143066406,
         | 
| 205 | 
            +
                  "step": 28
         | 
| 206 | 
            +
                },
         | 
| 207 | 
            +
                {
         | 
| 208 | 
            +
                  "epoch": 0.11812627291242363,
         | 
| 209 | 
            +
                  "grad_norm": 26.65915870666504,
         | 
| 210 | 
            +
                  "learning_rate": 2.3577235772357723e-07,
         | 
| 211 | 
            +
                  "loss": 1.4885194301605225,
         | 
| 212 | 
            +
                  "step": 29
         | 
| 213 | 
            +
                },
         | 
| 214 | 
            +
                {
         | 
| 215 | 
            +
                  "epoch": 0.12219959266802444,
         | 
| 216 | 
            +
                  "grad_norm": 27.386289596557617,
         | 
| 217 | 
            +
                  "learning_rate": 2.439024390243902e-07,
         | 
| 218 | 
            +
                  "loss": 1.3836334347724915,
         | 
| 219 | 
            +
                  "step": 30
         | 
| 220 | 
            +
                },
         | 
| 221 | 
            +
                {
         | 
| 222 | 
            +
                  "epoch": 0.12627291242362526,
         | 
| 223 | 
            +
                  "grad_norm": 25.87419319152832,
         | 
| 224 | 
            +
                  "learning_rate": 2.520325203252032e-07,
         | 
| 225 | 
            +
                  "loss": 1.3642336130142212,
         | 
| 226 | 
            +
                  "step": 31
         | 
| 227 | 
            +
                },
         | 
| 228 | 
            +
                {
         | 
| 229 | 
            +
                  "epoch": 0.13034623217922606,
         | 
| 230 | 
            +
                  "grad_norm": 26.620105743408203,
         | 
| 231 | 
            +
                  "learning_rate": 2.6016260162601625e-07,
         | 
| 232 | 
            +
                  "loss": 1.3461121916770935,
         | 
| 233 | 
            +
                  "step": 32
         | 
| 234 | 
            +
                },
         | 
| 235 | 
            +
                {
         | 
| 236 | 
            +
                  "epoch": 0.13441955193482688,
         | 
| 237 | 
            +
                  "grad_norm": 22.665058135986328,
         | 
| 238 | 
            +
                  "learning_rate": 2.682926829268293e-07,
         | 
| 239 | 
            +
                  "loss": 1.2577590942382812,
         | 
| 240 | 
            +
                  "step": 33
         | 
| 241 | 
            +
                },
         | 
| 242 | 
            +
                {
         | 
| 243 | 
            +
                  "epoch": 0.1384928716904277,
         | 
| 244 | 
            +
                  "grad_norm": 23.679920196533203,
         | 
| 245 | 
            +
                  "learning_rate": 2.764227642276423e-07,
         | 
| 246 | 
            +
                  "loss": 1.2572017908096313,
         | 
| 247 | 
            +
                  "step": 34
         | 
| 248 | 
            +
                },
         | 
| 249 | 
            +
                {
         | 
| 250 | 
            +
                  "epoch": 0.1425661914460285,
         | 
| 251 | 
            +
                  "grad_norm": 25.136371612548828,
         | 
| 252 | 
            +
                  "learning_rate": 2.8455284552845527e-07,
         | 
| 253 | 
            +
                  "loss": 1.2670851349830627,
         | 
| 254 | 
            +
                  "step": 35
         | 
| 255 | 
            +
                },
         | 
| 256 | 
            +
                {
         | 
| 257 | 
            +
                  "epoch": 0.14663951120162932,
         | 
| 258 | 
            +
                  "grad_norm": 21.567337036132812,
         | 
| 259 | 
            +
                  "learning_rate": 2.9268292682926825e-07,
         | 
| 260 | 
            +
                  "loss": 1.242683231830597,
         | 
| 261 | 
            +
                  "step": 36
         | 
| 262 | 
            +
                },
         | 
| 263 | 
            +
                {
         | 
| 264 | 
            +
                  "epoch": 0.15071283095723015,
         | 
| 265 | 
            +
                  "grad_norm": 20.61647605895996,
         | 
| 266 | 
            +
                  "learning_rate": 3.008130081300813e-07,
         | 
| 267 | 
            +
                  "loss": 1.279579222202301,
         | 
| 268 | 
            +
                  "step": 37
         | 
| 269 | 
            +
                },
         | 
| 270 | 
            +
                {
         | 
| 271 | 
            +
                  "epoch": 0.15478615071283094,
         | 
| 272 | 
            +
                  "grad_norm": 20.656513214111328,
         | 
| 273 | 
            +
                  "learning_rate": 3.0894308943089434e-07,
         | 
| 274 | 
            +
                  "loss": 1.2040475606918335,
         | 
| 275 | 
            +
                  "step": 38
         | 
| 276 | 
            +
                },
         | 
| 277 | 
            +
                {
         | 
| 278 | 
            +
                  "epoch": 0.15885947046843177,
         | 
| 279 | 
            +
                  "grad_norm": 22.86530876159668,
         | 
| 280 | 
            +
                  "learning_rate": 3.170731707317073e-07,
         | 
| 281 | 
            +
                  "loss": 1.2522715330123901,
         | 
| 282 | 
            +
                  "step": 39
         | 
| 283 | 
            +
                },
         | 
| 284 | 
            +
                {
         | 
| 285 | 
            +
                  "epoch": 0.1629327902240326,
         | 
| 286 | 
            +
                  "grad_norm": 20.22757911682129,
         | 
| 287 | 
            +
                  "learning_rate": 3.252032520325203e-07,
         | 
| 288 | 
            +
                  "loss": 1.2012774348258972,
         | 
| 289 | 
            +
                  "step": 40
         | 
| 290 | 
            +
                },
         | 
| 291 | 
            +
                {
         | 
| 292 | 
            +
                  "epoch": 0.1670061099796334,
         | 
| 293 | 
            +
                  "grad_norm": 23.09739875793457,
         | 
| 294 | 
            +
                  "learning_rate": 3.333333333333333e-07,
         | 
| 295 | 
            +
                  "loss": 1.2088268399238586,
         | 
| 296 | 
            +
                  "step": 41
         | 
| 297 | 
            +
                },
         | 
| 298 | 
            +
                {
         | 
| 299 | 
            +
                  "epoch": 0.1710794297352342,
         | 
| 300 | 
            +
                  "grad_norm": 22.845685958862305,
         | 
| 301 | 
            +
                  "learning_rate": 3.4146341463414634e-07,
         | 
| 302 | 
            +
                  "loss": 1.0982880592346191,
         | 
| 303 | 
            +
                  "step": 42
         | 
| 304 | 
            +
                },
         | 
| 305 | 
            +
                {
         | 
| 306 | 
            +
                  "epoch": 0.17515274949083504,
         | 
| 307 | 
            +
                  "grad_norm": 19.80814552307129,
         | 
| 308 | 
            +
                  "learning_rate": 3.4959349593495933e-07,
         | 
| 309 | 
            +
                  "loss": 1.1271469593048096,
         | 
| 310 | 
            +
                  "step": 43
         | 
| 311 | 
            +
                },
         | 
| 312 | 
            +
                {
         | 
| 313 | 
            +
                  "epoch": 0.17922606924643583,
         | 
| 314 | 
            +
                  "grad_norm": 20.553686141967773,
         | 
| 315 | 
            +
                  "learning_rate": 3.5772357723577237e-07,
         | 
| 316 | 
            +
                  "loss": 1.0008204579353333,
         | 
| 317 | 
            +
                  "step": 44
         | 
| 318 | 
            +
                },
         | 
| 319 | 
            +
                {
         | 
| 320 | 
            +
                  "epoch": 0.18329938900203666,
         | 
| 321 | 
            +
                  "grad_norm": 16.66282844543457,
         | 
| 322 | 
            +
                  "learning_rate": 3.6585365853658536e-07,
         | 
| 323 | 
            +
                  "loss": 0.9251897931098938,
         | 
| 324 | 
            +
                  "step": 45
         | 
| 325 | 
            +
                },
         | 
| 326 | 
            +
                {
         | 
| 327 | 
            +
                  "epoch": 0.18737270875763748,
         | 
| 328 | 
            +
                  "grad_norm": 15.797308921813965,
         | 
| 329 | 
            +
                  "learning_rate": 3.7398373983739835e-07,
         | 
| 330 | 
            +
                  "loss": 1.0191328525543213,
         | 
| 331 | 
            +
                  "step": 46
         | 
| 332 | 
            +
                },
         | 
| 333 | 
            +
                {
         | 
| 334 | 
            +
                  "epoch": 0.19144602851323828,
         | 
| 335 | 
            +
                  "grad_norm": 13.579208374023438,
         | 
| 336 | 
            +
                  "learning_rate": 3.821138211382114e-07,
         | 
| 337 | 
            +
                  "loss": 0.774791806936264,
         | 
| 338 | 
            +
                  "step": 47
         | 
| 339 | 
            +
                },
         | 
| 340 | 
            +
                {
         | 
| 341 | 
            +
                  "epoch": 0.1955193482688391,
         | 
| 342 | 
            +
                  "grad_norm": 14.556002616882324,
         | 
| 343 | 
            +
                  "learning_rate": 3.902439024390244e-07,
         | 
| 344 | 
            +
                  "loss": 1.0026790797710419,
         | 
| 345 | 
            +
                  "step": 48
         | 
| 346 | 
            +
                },
         | 
| 347 | 
            +
                {
         | 
| 348 | 
            +
                  "epoch": 0.19959266802443992,
         | 
| 349 | 
            +
                  "grad_norm": 14.489509582519531,
         | 
| 350 | 
            +
                  "learning_rate": 3.9837398373983736e-07,
         | 
| 351 | 
            +
                  "loss": 0.9430837631225586,
         | 
| 352 | 
            +
                  "step": 49
         | 
| 353 | 
            +
                },
         | 
| 354 | 
            +
                {
         | 
| 355 | 
            +
                  "epoch": 0.20366598778004075,
         | 
| 356 | 
            +
                  "grad_norm": 12.495223999023438,
         | 
| 357 | 
            +
                  "learning_rate": 4.065040650406504e-07,
         | 
| 358 | 
            +
                  "loss": 0.8999880254268646,
         | 
| 359 | 
            +
                  "step": 50
         | 
| 360 | 
            +
                },
         | 
| 361 | 
            +
                {
         | 
| 362 | 
            +
                  "epoch": 0.20773930753564154,
         | 
| 363 | 
            +
                  "grad_norm": 11.441575050354004,
         | 
| 364 | 
            +
                  "learning_rate": 4.146341463414634e-07,
         | 
| 365 | 
            +
                  "loss": 0.8320233225822449,
         | 
| 366 | 
            +
                  "step": 51
         | 
| 367 | 
            +
                },
         | 
| 368 | 
            +
                {
         | 
| 369 | 
            +
                  "epoch": 0.21181262729124237,
         | 
| 370 | 
            +
                  "grad_norm": 10.894216537475586,
         | 
| 371 | 
            +
                  "learning_rate": 4.2276422764227643e-07,
         | 
| 372 | 
            +
                  "loss": 0.8139239549636841,
         | 
| 373 | 
            +
                  "step": 52
         | 
| 374 | 
            +
                },
         | 
| 375 | 
            +
                {
         | 
| 376 | 
            +
                  "epoch": 0.2158859470468432,
         | 
| 377 | 
            +
                  "grad_norm": 10.404220581054688,
         | 
| 378 | 
            +
                  "learning_rate": 4.308943089430894e-07,
         | 
| 379 | 
            +
                  "loss": 0.8323288261890411,
         | 
| 380 | 
            +
                  "step": 53
         | 
| 381 | 
            +
                },
         | 
| 382 | 
            +
                {
         | 
| 383 | 
            +
                  "epoch": 0.219959266802444,
         | 
| 384 | 
            +
                  "grad_norm": 10.463072776794434,
         | 
| 385 | 
            +
                  "learning_rate": 4.390243902439024e-07,
         | 
| 386 | 
            +
                  "loss": 0.882573276758194,
         | 
| 387 | 
            +
                  "step": 54
         | 
| 388 | 
            +
                },
         | 
| 389 | 
            +
                {
         | 
| 390 | 
            +
                  "epoch": 0.2240325865580448,
         | 
| 391 | 
            +
                  "grad_norm": 10.669075012207031,
         | 
| 392 | 
            +
                  "learning_rate": 4.471544715447154e-07,
         | 
| 393 | 
            +
                  "loss": 0.749780923128128,
         | 
| 394 | 
            +
                  "step": 55
         | 
| 395 | 
            +
                },
         | 
| 396 | 
            +
                {
         | 
| 397 | 
            +
                  "epoch": 0.22810590631364563,
         | 
| 398 | 
            +
                  "grad_norm": 10.453638076782227,
         | 
| 399 | 
            +
                  "learning_rate": 4.5528455284552844e-07,
         | 
| 400 | 
            +
                  "loss": 0.7727148830890656,
         | 
| 401 | 
            +
                  "step": 56
         | 
| 402 | 
            +
                },
         | 
| 403 | 
            +
                {
         | 
| 404 | 
            +
                  "epoch": 0.23217922606924643,
         | 
| 405 | 
            +
                  "grad_norm": 11.427080154418945,
         | 
| 406 | 
            +
                  "learning_rate": 4.634146341463415e-07,
         | 
| 407 | 
            +
                  "loss": 0.8585084676742554,
         | 
| 408 | 
            +
                  "step": 57
         | 
| 409 | 
            +
                },
         | 
| 410 | 
            +
                {
         | 
| 411 | 
            +
                  "epoch": 0.23625254582484725,
         | 
| 412 | 
            +
                  "grad_norm": 8.558117866516113,
         | 
| 413 | 
            +
                  "learning_rate": 4.7154471544715447e-07,
         | 
| 414 | 
            +
                  "loss": 0.7314337491989136,
         | 
| 415 | 
            +
                  "step": 58
         | 
| 416 | 
            +
                },
         | 
| 417 | 
            +
                {
         | 
| 418 | 
            +
                  "epoch": 0.24032586558044808,
         | 
| 419 | 
            +
                  "grad_norm": 9.031648635864258,
         | 
| 420 | 
            +
                  "learning_rate": 4.796747967479675e-07,
         | 
| 421 | 
            +
                  "loss": 0.701579749584198,
         | 
| 422 | 
            +
                  "step": 59
         | 
| 423 | 
            +
                },
         | 
| 424 | 
            +
                {
         | 
| 425 | 
            +
                  "epoch": 0.24439918533604887,
         | 
| 426 | 
            +
                  "grad_norm": 8.817708969116211,
         | 
| 427 | 
            +
                  "learning_rate": 4.878048780487804e-07,
         | 
| 428 | 
            +
                  "loss": 0.7815204560756683,
         | 
| 429 | 
            +
                  "step": 60
         | 
| 430 | 
            +
                },
         | 
| 431 | 
            +
                {
         | 
| 432 | 
            +
                  "epoch": 0.2484725050916497,
         | 
| 433 | 
            +
                  "grad_norm": 8.00804615020752,
         | 
| 434 | 
            +
                  "learning_rate": 4.959349593495934e-07,
         | 
| 435 | 
            +
                  "loss": 0.655106246471405,
         | 
| 436 | 
            +
                  "step": 61
         | 
| 437 | 
            +
                },
         | 
| 438 | 
            +
                {
         | 
| 439 | 
            +
                  "epoch": 0.2525458248472505,
         | 
| 440 | 
            +
                  "grad_norm": 6.538842678070068,
         | 
| 441 | 
            +
                  "learning_rate": 5.040650406504064e-07,
         | 
| 442 | 
            +
                  "loss": 0.6697916388511658,
         | 
| 443 | 
            +
                  "step": 62
         | 
| 444 | 
            +
                },
         | 
| 445 | 
            +
                {
         | 
| 446 | 
            +
                  "epoch": 0.25661914460285135,
         | 
| 447 | 
            +
                  "grad_norm": 7.5446553230285645,
         | 
| 448 | 
            +
                  "learning_rate": 5.121951219512195e-07,
         | 
| 449 | 
            +
                  "loss": 0.7426944077014923,
         | 
| 450 | 
            +
                  "step": 63
         | 
| 451 | 
            +
                },
         | 
| 452 | 
            +
                {
         | 
| 453 | 
            +
                  "epoch": 0.2606924643584521,
         | 
| 454 | 
            +
                  "grad_norm": 6.402474403381348,
         | 
| 455 | 
            +
                  "learning_rate": 5.203252032520325e-07,
         | 
| 456 | 
            +
                  "loss": 0.6401277780532837,
         | 
| 457 | 
            +
                  "step": 64
         | 
| 458 | 
            +
                },
         | 
| 459 | 
            +
                {
         | 
| 460 | 
            +
                  "epoch": 0.26476578411405294,
         | 
| 461 | 
            +
                  "grad_norm": 7.257569313049316,
         | 
| 462 | 
            +
                  "learning_rate": 5.284552845528455e-07,
         | 
| 463 | 
            +
                  "loss": 0.6731106042861938,
         | 
| 464 | 
            +
                  "step": 65
         | 
| 465 | 
            +
                },
         | 
| 466 | 
            +
                {
         | 
| 467 | 
            +
                  "epoch": 0.26883910386965376,
         | 
| 468 | 
            +
                  "grad_norm": 6.263636589050293,
         | 
| 469 | 
            +
                  "learning_rate": 5.365853658536586e-07,
         | 
| 470 | 
            +
                  "loss": 0.5806022882461548,
         | 
| 471 | 
            +
                  "step": 66
         | 
| 472 | 
            +
                },
         | 
| 473 | 
            +
                {
         | 
| 474 | 
            +
                  "epoch": 0.2729124236252546,
         | 
| 475 | 
            +
                  "grad_norm": 5.273800849914551,
         | 
| 476 | 
            +
                  "learning_rate": 5.447154471544715e-07,
         | 
| 477 | 
            +
                  "loss": 0.5338439792394638,
         | 
| 478 | 
            +
                  "step": 67
         | 
| 479 | 
            +
                },
         | 
| 480 | 
            +
                {
         | 
| 481 | 
            +
                  "epoch": 0.2769857433808554,
         | 
| 482 | 
            +
                  "grad_norm": 5.2786149978637695,
         | 
| 483 | 
            +
                  "learning_rate": 5.528455284552846e-07,
         | 
| 484 | 
            +
                  "loss": 0.5390533208847046,
         | 
| 485 | 
            +
                  "step": 68
         | 
| 486 | 
            +
                },
         | 
| 487 | 
            +
                {
         | 
| 488 | 
            +
                  "epoch": 0.28105906313645623,
         | 
| 489 | 
            +
                  "grad_norm": 4.901702404022217,
         | 
| 490 | 
            +
                  "learning_rate": 5.609756097560975e-07,
         | 
| 491 | 
            +
                  "loss": 0.5899032056331635,
         | 
| 492 | 
            +
                  "step": 69
         | 
| 493 | 
            +
                },
         | 
| 494 | 
            +
                {
         | 
| 495 | 
            +
                  "epoch": 0.285132382892057,
         | 
| 496 | 
            +
                  "grad_norm": 4.853933811187744,
         | 
| 497 | 
            +
                  "learning_rate": 5.691056910569105e-07,
         | 
| 498 | 
            +
                  "loss": 0.5600310862064362,
         | 
| 499 | 
            +
                  "step": 70
         | 
| 500 | 
            +
                },
         | 
| 501 | 
            +
                {
         | 
| 502 | 
            +
                  "epoch": 0.2892057026476578,
         | 
| 503 | 
            +
                  "grad_norm": 4.680273532867432,
         | 
| 504 | 
            +
                  "learning_rate": 5.772357723577236e-07,
         | 
| 505 | 
            +
                  "loss": 0.5319355428218842,
         | 
| 506 | 
            +
                  "step": 71
         | 
| 507 | 
            +
                },
         | 
| 508 | 
            +
                {
         | 
| 509 | 
            +
                  "epoch": 0.29327902240325865,
         | 
| 510 | 
            +
                  "grad_norm": 3.7406885623931885,
         | 
| 511 | 
            +
                  "learning_rate": 5.853658536585365e-07,
         | 
| 512 | 
            +
                  "loss": 0.508156955242157,
         | 
| 513 | 
            +
                  "step": 72
         | 
| 514 | 
            +
                },
         | 
| 515 | 
            +
                {
         | 
| 516 | 
            +
                  "epoch": 0.2973523421588595,
         | 
| 517 | 
            +
                  "grad_norm": 4.389779567718506,
         | 
| 518 | 
            +
                  "learning_rate": 5.934959349593496e-07,
         | 
| 519 | 
            +
                  "loss": 0.49855048954486847,
         | 
| 520 | 
            +
                  "step": 73
         | 
| 521 | 
            +
                },
         | 
| 522 | 
            +
                {
         | 
| 523 | 
            +
                  "epoch": 0.3014256619144603,
         | 
| 524 | 
            +
                  "grad_norm": 4.23866081237793,
         | 
| 525 | 
            +
                  "learning_rate": 6.016260162601626e-07,
         | 
| 526 | 
            +
                  "loss": 0.5242476612329483,
         | 
| 527 | 
            +
                  "step": 74
         | 
| 528 | 
            +
                },
         | 
| 529 | 
            +
                {
         | 
| 530 | 
            +
                  "epoch": 0.3054989816700611,
         | 
| 531 | 
            +
                  "grad_norm": 4.1824951171875,
         | 
| 532 | 
            +
                  "learning_rate": 6.097560975609756e-07,
         | 
| 533 | 
            +
                  "loss": 0.532037615776062,
         | 
| 534 | 
            +
                  "step": 75
         | 
| 535 | 
            +
                },
         | 
| 536 | 
            +
                {
         | 
| 537 | 
            +
                  "epoch": 0.3095723014256619,
         | 
| 538 | 
            +
                  "grad_norm": 3.7223150730133057,
         | 
| 539 | 
            +
                  "learning_rate": 6.178861788617887e-07,
         | 
| 540 | 
            +
                  "loss": 0.46959882974624634,
         | 
| 541 | 
            +
                  "step": 76
         | 
| 542 | 
            +
                },
         | 
| 543 | 
            +
                {
         | 
| 544 | 
            +
                  "epoch": 0.3136456211812627,
         | 
| 545 | 
            +
                  "grad_norm": 3.545388698577881,
         | 
| 546 | 
            +
                  "learning_rate": 6.260162601626016e-07,
         | 
| 547 | 
            +
                  "loss": 0.4825982600450516,
         | 
| 548 | 
            +
                  "step": 77
         | 
| 549 | 
            +
                },
         | 
| 550 | 
            +
                {
         | 
| 551 | 
            +
                  "epoch": 0.31771894093686354,
         | 
| 552 | 
            +
                  "grad_norm": 3.6351099014282227,
         | 
| 553 | 
            +
                  "learning_rate": 6.341463414634146e-07,
         | 
| 554 | 
            +
                  "loss": 0.5095209777355194,
         | 
| 555 | 
            +
                  "step": 78
         | 
| 556 | 
            +
                },
         | 
| 557 | 
            +
                {
         | 
| 558 | 
            +
                  "epoch": 0.32179226069246436,
         | 
| 559 | 
            +
                  "grad_norm": 3.243072271347046,
         | 
| 560 | 
            +
                  "learning_rate": 6.422764227642276e-07,
         | 
| 561 | 
            +
                  "loss": 0.4842926263809204,
         | 
| 562 | 
            +
                  "step": 79
         | 
| 563 | 
            +
                },
         | 
| 564 | 
            +
                {
         | 
| 565 | 
            +
                  "epoch": 0.3258655804480652,
         | 
| 566 | 
            +
                  "grad_norm": 3.5646300315856934,
         | 
| 567 | 
            +
                  "learning_rate": 6.504065040650406e-07,
         | 
| 568 | 
            +
                  "loss": 0.4908552020788193,
         | 
| 569 | 
            +
                  "step": 80
         | 
| 570 | 
            +
                },
         | 
| 571 | 
            +
                {
         | 
| 572 | 
            +
                  "epoch": 0.329938900203666,
         | 
| 573 | 
            +
                  "grad_norm": 3.5380759239196777,
         | 
| 574 | 
            +
                  "learning_rate": 6.585365853658536e-07,
         | 
| 575 | 
            +
                  "loss": 0.4536065459251404,
         | 
| 576 | 
            +
                  "step": 81
         | 
| 577 | 
            +
                },
         | 
| 578 | 
            +
                {
         | 
| 579 | 
            +
                  "epoch": 0.3340122199592668,
         | 
| 580 | 
            +
                  "grad_norm": 3.128525495529175,
         | 
| 581 | 
            +
                  "learning_rate": 6.666666666666666e-07,
         | 
| 582 | 
            +
                  "loss": 0.47657161951065063,
         | 
| 583 | 
            +
                  "step": 82
         | 
| 584 | 
            +
                },
         | 
| 585 | 
            +
                {
         | 
| 586 | 
            +
                  "epoch": 0.3380855397148676,
         | 
| 587 | 
            +
                  "grad_norm": 3.3621485233306885,
         | 
| 588 | 
            +
                  "learning_rate": 6.747967479674797e-07,
         | 
| 589 | 
            +
                  "loss": 0.43791596591472626,
         | 
| 590 | 
            +
                  "step": 83
         | 
| 591 | 
            +
                },
         | 
| 592 | 
            +
                {
         | 
| 593 | 
            +
                  "epoch": 0.3421588594704684,
         | 
| 594 | 
            +
                  "grad_norm": 3.39066219329834,
         | 
| 595 | 
            +
                  "learning_rate": 6.829268292682927e-07,
         | 
| 596 | 
            +
                  "loss": 0.42947711050510406,
         | 
| 597 | 
            +
                  "step": 84
         | 
| 598 | 
            +
                },
         | 
| 599 | 
            +
                {
         | 
| 600 | 
            +
                  "epoch": 0.34623217922606925,
         | 
| 601 | 
            +
                  "grad_norm": 3.7795698642730713,
         | 
| 602 | 
            +
                  "learning_rate": 6.910569105691057e-07,
         | 
| 603 | 
            +
                  "loss": 0.4219910502433777,
         | 
| 604 | 
            +
                  "step": 85
         | 
| 605 | 
            +
                },
         | 
| 606 | 
            +
                {
         | 
| 607 | 
            +
                  "epoch": 0.35030549898167007,
         | 
| 608 | 
            +
                  "grad_norm": 3.633206367492676,
         | 
| 609 | 
            +
                  "learning_rate": 6.991869918699187e-07,
         | 
| 610 | 
            +
                  "loss": 0.4253977984189987,
         | 
| 611 | 
            +
                  "step": 86
         | 
| 612 | 
            +
                },
         | 
| 613 | 
            +
                {
         | 
| 614 | 
            +
                  "epoch": 0.3543788187372709,
         | 
| 615 | 
            +
                  "grad_norm": 3.6160175800323486,
         | 
| 616 | 
            +
                  "learning_rate": 7.073170731707316e-07,
         | 
| 617 | 
            +
                  "loss": 0.449339896440506,
         | 
| 618 | 
            +
                  "step": 87
         | 
| 619 | 
            +
                },
         | 
| 620 | 
            +
                {
         | 
| 621 | 
            +
                  "epoch": 0.35845213849287166,
         | 
| 622 | 
            +
                  "grad_norm": 3.30557918548584,
         | 
| 623 | 
            +
                  "learning_rate": 7.154471544715447e-07,
         | 
| 624 | 
            +
                  "loss": 0.45001736283302307,
         | 
| 625 | 
            +
                  "step": 88
         | 
| 626 | 
            +
                },
         | 
| 627 | 
            +
                {
         | 
| 628 | 
            +
                  "epoch": 0.3625254582484725,
         | 
| 629 | 
            +
                  "grad_norm": 3.1727640628814697,
         | 
| 630 | 
            +
                  "learning_rate": 7.235772357723577e-07,
         | 
| 631 | 
            +
                  "loss": 0.4165496975183487,
         | 
| 632 | 
            +
                  "step": 89
         | 
| 633 | 
            +
                },
         | 
| 634 | 
            +
                {
         | 
| 635 | 
            +
                  "epoch": 0.3665987780040733,
         | 
| 636 | 
            +
                  "grad_norm": 3.073976516723633,
         | 
| 637 | 
            +
                  "learning_rate": 7.317073170731707e-07,
         | 
| 638 | 
            +
                  "loss": 0.4443822205066681,
         | 
| 639 | 
            +
                  "step": 90
         | 
| 640 | 
            +
                },
         | 
| 641 | 
            +
                {
         | 
| 642 | 
            +
                  "epoch": 0.37067209775967414,
         | 
| 643 | 
            +
                  "grad_norm": 3.129105567932129,
         | 
| 644 | 
            +
                  "learning_rate": 7.398373983739837e-07,
         | 
| 645 | 
            +
                  "loss": 0.4265598952770233,
         | 
| 646 | 
            +
                  "step": 91
         | 
| 647 | 
            +
                },
         | 
| 648 | 
            +
                {
         | 
| 649 | 
            +
                  "epoch": 0.37474541751527496,
         | 
| 650 | 
            +
                  "grad_norm": 3.1485190391540527,
         | 
| 651 | 
            +
                  "learning_rate": 7.479674796747967e-07,
         | 
| 652 | 
            +
                  "loss": 0.3882734924554825,
         | 
| 653 | 
            +
                  "step": 92
         | 
| 654 | 
            +
                },
         | 
| 655 | 
            +
                {
         | 
| 656 | 
            +
                  "epoch": 0.3788187372708758,
         | 
| 657 | 
            +
                  "grad_norm": 3.1610565185546875,
         | 
| 658 | 
            +
                  "learning_rate": 7.560975609756097e-07,
         | 
| 659 | 
            +
                  "loss": 0.37010858952999115,
         | 
| 660 | 
            +
                  "step": 93
         | 
| 661 | 
            +
                },
         | 
| 662 | 
            +
                {
         | 
| 663 | 
            +
                  "epoch": 0.38289205702647655,
         | 
| 664 | 
            +
                  "grad_norm": 3.039264440536499,
         | 
| 665 | 
            +
                  "learning_rate": 7.642276422764228e-07,
         | 
| 666 | 
            +
                  "loss": 0.400989294052124,
         | 
| 667 | 
            +
                  "step": 94
         | 
| 668 | 
            +
                },
         | 
| 669 | 
            +
                {
         | 
| 670 | 
            +
                  "epoch": 0.3869653767820774,
         | 
| 671 | 
            +
                  "grad_norm": 2.9321980476379395,
         | 
| 672 | 
            +
                  "learning_rate": 7.723577235772358e-07,
         | 
| 673 | 
            +
                  "loss": 0.3771343380212784,
         | 
| 674 | 
            +
                  "step": 95
         | 
| 675 | 
            +
                },
         | 
| 676 | 
            +
                {
         | 
| 677 | 
            +
                  "epoch": 0.3910386965376782,
         | 
| 678 | 
            +
                  "grad_norm": 2.807072162628174,
         | 
| 679 | 
            +
                  "learning_rate": 7.804878048780488e-07,
         | 
| 680 | 
            +
                  "loss": 0.4001482129096985,
         | 
| 681 | 
            +
                  "step": 96
         | 
| 682 | 
            +
                },
         | 
| 683 | 
            +
                {
         | 
| 684 | 
            +
                  "epoch": 0.395112016293279,
         | 
| 685 | 
            +
                  "grad_norm": 2.8286941051483154,
         | 
| 686 | 
            +
                  "learning_rate": 7.886178861788617e-07,
         | 
| 687 | 
            +
                  "loss": 0.4234430640935898,
         | 
| 688 | 
            +
                  "step": 97
         | 
| 689 | 
            +
                },
         | 
| 690 | 
            +
                {
         | 
| 691 | 
            +
                  "epoch": 0.39918533604887985,
         | 
| 692 | 
            +
                  "grad_norm": 2.9245986938476562,
         | 
| 693 | 
            +
                  "learning_rate": 7.967479674796747e-07,
         | 
| 694 | 
            +
                  "loss": 0.3854667395353317,
         | 
| 695 | 
            +
                  "step": 98
         | 
| 696 | 
            +
                },
         | 
| 697 | 
            +
                {
         | 
| 698 | 
            +
                  "epoch": 0.40325865580448067,
         | 
| 699 | 
            +
                  "grad_norm": 3.015875816345215,
         | 
| 700 | 
            +
                  "learning_rate": 8.048780487804878e-07,
         | 
| 701 | 
            +
                  "loss": 0.38027653098106384,
         | 
| 702 | 
            +
                  "step": 99
         | 
| 703 | 
            +
                },
         | 
| 704 | 
            +
                {
         | 
| 705 | 
            +
                  "epoch": 0.4073319755600815,
         | 
| 706 | 
            +
                  "grad_norm": 2.907216787338257,
         | 
| 707 | 
            +
                  "learning_rate": 8.130081300813008e-07,
         | 
| 708 | 
            +
                  "loss": 0.34937676787376404,
         | 
| 709 | 
            +
                  "step": 100
         | 
| 710 | 
            +
                },
         | 
| 711 | 
            +
                {
         | 
| 712 | 
            +
                  "epoch": 0.41140529531568226,
         | 
| 713 | 
            +
                  "grad_norm": 3.131850004196167,
         | 
| 714 | 
            +
                  "learning_rate": 8.211382113821138e-07,
         | 
| 715 | 
            +
                  "loss": 0.4414845108985901,
         | 
| 716 | 
            +
                  "step": 101
         | 
| 717 | 
            +
                },
         | 
| 718 | 
            +
                {
         | 
| 719 | 
            +
                  "epoch": 0.4154786150712831,
         | 
| 720 | 
            +
                  "grad_norm": 2.9019775390625,
         | 
| 721 | 
            +
                  "learning_rate": 8.292682926829268e-07,
         | 
| 722 | 
            +
                  "loss": 0.3990558981895447,
         | 
| 723 | 
            +
                  "step": 102
         | 
| 724 | 
            +
                },
         | 
| 725 | 
            +
                {
         | 
| 726 | 
            +
                  "epoch": 0.4195519348268839,
         | 
| 727 | 
            +
                  "grad_norm": 2.9362523555755615,
         | 
| 728 | 
            +
                  "learning_rate": 8.373983739837398e-07,
         | 
| 729 | 
            +
                  "loss": 0.41413092613220215,
         | 
| 730 | 
            +
                  "step": 103
         | 
| 731 | 
            +
                },
         | 
| 732 | 
            +
                {
         | 
| 733 | 
            +
                  "epoch": 0.42362525458248473,
         | 
| 734 | 
            +
                  "grad_norm": 3.0895473957061768,
         | 
| 735 | 
            +
                  "learning_rate": 8.455284552845529e-07,
         | 
| 736 | 
            +
                  "loss": 0.3904542028903961,
         | 
| 737 | 
            +
                  "step": 104
         | 
| 738 | 
            +
                },
         | 
| 739 | 
            +
                {
         | 
| 740 | 
            +
                  "epoch": 0.42769857433808556,
         | 
| 741 | 
            +
                  "grad_norm": 2.9235992431640625,
         | 
| 742 | 
            +
                  "learning_rate": 8.536585365853657e-07,
         | 
| 743 | 
            +
                  "loss": 0.3995140939950943,
         | 
| 744 | 
            +
                  "step": 105
         | 
| 745 | 
            +
                },
         | 
| 746 | 
            +
                {
         | 
| 747 | 
            +
                  "epoch": 0.4317718940936864,
         | 
| 748 | 
            +
                  "grad_norm": 2.919102668762207,
         | 
| 749 | 
            +
                  "learning_rate": 8.617886178861788e-07,
         | 
| 750 | 
            +
                  "loss": 0.32857778668403625,
         | 
| 751 | 
            +
                  "step": 106
         | 
| 752 | 
            +
                },
         | 
| 753 | 
            +
                {
         | 
| 754 | 
            +
                  "epoch": 0.43584521384928715,
         | 
| 755 | 
            +
                  "grad_norm": 2.831698417663574,
         | 
| 756 | 
            +
                  "learning_rate": 8.699186991869918e-07,
         | 
| 757 | 
            +
                  "loss": 0.3507983237504959,
         | 
| 758 | 
            +
                  "step": 107
         | 
| 759 | 
            +
                },
         | 
| 760 | 
            +
                {
         | 
| 761 | 
            +
                  "epoch": 0.439918533604888,
         | 
| 762 | 
            +
                  "grad_norm": 2.952693223953247,
         | 
| 763 | 
            +
                  "learning_rate": 8.780487804878048e-07,
         | 
| 764 | 
            +
                  "loss": 0.37046514451503754,
         | 
| 765 | 
            +
                  "step": 108
         | 
| 766 | 
            +
                },
         | 
| 767 | 
            +
                {
         | 
| 768 | 
            +
                  "epoch": 0.4439918533604888,
         | 
| 769 | 
            +
                  "grad_norm": 3.315002679824829,
         | 
| 770 | 
            +
                  "learning_rate": 8.861788617886179e-07,
         | 
| 771 | 
            +
                  "loss": 0.391086682677269,
         | 
| 772 | 
            +
                  "step": 109
         | 
| 773 | 
            +
                },
         | 
| 774 | 
            +
                {
         | 
| 775 | 
            +
                  "epoch": 0.4480651731160896,
         | 
| 776 | 
            +
                  "grad_norm": 2.7241294384002686,
         | 
| 777 | 
            +
                  "learning_rate": 8.943089430894308e-07,
         | 
| 778 | 
            +
                  "loss": 0.3864188492298126,
         | 
| 779 | 
            +
                  "step": 110
         | 
| 780 | 
            +
                },
         | 
| 781 | 
            +
                {
         | 
| 782 | 
            +
                  "epoch": 0.45213849287169044,
         | 
| 783 | 
            +
                  "grad_norm": 2.782064199447632,
         | 
| 784 | 
            +
                  "learning_rate": 9.024390243902439e-07,
         | 
| 785 | 
            +
                  "loss": 0.38219109177589417,
         | 
| 786 | 
            +
                  "step": 111
         | 
| 787 | 
            +
                },
         | 
| 788 | 
            +
                {
         | 
| 789 | 
            +
                  "epoch": 0.45621181262729127,
         | 
| 790 | 
            +
                  "grad_norm": 4.001572132110596,
         | 
| 791 | 
            +
                  "learning_rate": 9.105691056910569e-07,
         | 
| 792 | 
            +
                  "loss": 0.3784598410129547,
         | 
| 793 | 
            +
                  "step": 112
         | 
| 794 | 
            +
                },
         | 
| 795 | 
            +
                {
         | 
| 796 | 
            +
                  "epoch": 0.46028513238289204,
         | 
| 797 | 
            +
                  "grad_norm": 2.607434034347534,
         | 
| 798 | 
            +
                  "learning_rate": 9.186991869918699e-07,
         | 
| 799 | 
            +
                  "loss": 0.3763512521982193,
         | 
| 800 | 
            +
                  "step": 113
         | 
| 801 | 
            +
                },
         | 
| 802 | 
            +
                {
         | 
| 803 | 
            +
                  "epoch": 0.46435845213849286,
         | 
| 804 | 
            +
                  "grad_norm": 2.97188138961792,
         | 
| 805 | 
            +
                  "learning_rate": 9.26829268292683e-07,
         | 
| 806 | 
            +
                  "loss": 0.36788034439086914,
         | 
| 807 | 
            +
                  "step": 114
         | 
| 808 | 
            +
                },
         | 
| 809 | 
            +
                {
         | 
| 810 | 
            +
                  "epoch": 0.4684317718940937,
         | 
| 811 | 
            +
                  "grad_norm": 2.9631524085998535,
         | 
| 812 | 
            +
                  "learning_rate": 9.349593495934958e-07,
         | 
| 813 | 
            +
                  "loss": 0.3696867823600769,
         | 
| 814 | 
            +
                  "step": 115
         | 
| 815 | 
            +
                },
         | 
| 816 | 
            +
                {
         | 
| 817 | 
            +
                  "epoch": 0.4725050916496945,
         | 
| 818 | 
            +
                  "grad_norm": 2.5895049571990967,
         | 
| 819 | 
            +
                  "learning_rate": 9.430894308943089e-07,
         | 
| 820 | 
            +
                  "loss": 0.3349902927875519,
         | 
| 821 | 
            +
                  "step": 116
         | 
| 822 | 
            +
                },
         | 
| 823 | 
            +
                {
         | 
| 824 | 
            +
                  "epoch": 0.47657841140529533,
         | 
| 825 | 
            +
                  "grad_norm": 2.600832462310791,
         | 
| 826 | 
            +
                  "learning_rate": 9.512195121951218e-07,
         | 
| 827 | 
            +
                  "loss": 0.34966227412223816,
         | 
| 828 | 
            +
                  "step": 117
         | 
| 829 | 
            +
                },
         | 
| 830 | 
            +
                {
         | 
| 831 | 
            +
                  "epoch": 0.48065173116089616,
         | 
| 832 | 
            +
                  "grad_norm": 3.0639443397521973,
         | 
| 833 | 
            +
                  "learning_rate": 9.59349593495935e-07,
         | 
| 834 | 
            +
                  "loss": 0.38310858607292175,
         | 
| 835 | 
            +
                  "step": 118
         | 
| 836 | 
            +
                },
         | 
| 837 | 
            +
                {
         | 
| 838 | 
            +
                  "epoch": 0.4847250509164969,
         | 
| 839 | 
            +
                  "grad_norm": 2.6944706439971924,
         | 
| 840 | 
            +
                  "learning_rate": 9.67479674796748e-07,
         | 
| 841 | 
            +
                  "loss": 0.3360476493835449,
         | 
| 842 | 
            +
                  "step": 119
         | 
| 843 | 
            +
                },
         | 
| 844 | 
            +
                {
         | 
| 845 | 
            +
                  "epoch": 0.48879837067209775,
         | 
| 846 | 
            +
                  "grad_norm": 2.8398237228393555,
         | 
| 847 | 
            +
                  "learning_rate": 9.756097560975609e-07,
         | 
| 848 | 
            +
                  "loss": 0.39176714420318604,
         | 
| 849 | 
            +
                  "step": 120
         | 
| 850 | 
            +
                },
         | 
| 851 | 
            +
                {
         | 
| 852 | 
            +
                  "epoch": 0.49287169042769857,
         | 
| 853 | 
            +
                  "grad_norm": 2.8028745651245117,
         | 
| 854 | 
            +
                  "learning_rate": 9.83739837398374e-07,
         | 
| 855 | 
            +
                  "loss": 0.37909021973609924,
         | 
| 856 | 
            +
                  "step": 121
         | 
| 857 | 
            +
                },
         | 
| 858 | 
            +
                {
         | 
| 859 | 
            +
                  "epoch": 0.4969450101832994,
         | 
| 860 | 
            +
                  "grad_norm": 2.6169185638427734,
         | 
| 861 | 
            +
                  "learning_rate": 9.918699186991869e-07,
         | 
| 862 | 
            +
                  "loss": 0.37069061398506165,
         | 
| 863 | 
            +
                  "step": 122
         | 
| 864 | 
            +
                },
         | 
| 865 | 
            +
                {
         | 
| 866 | 
            +
                  "epoch": 0.5010183299389002,
         | 
| 867 | 
            +
                  "grad_norm": 2.572046995162964,
         | 
| 868 | 
            +
                  "learning_rate": 1e-06,
         | 
| 869 | 
            +
                  "loss": 0.3428824096918106,
         | 
| 870 | 
            +
                  "step": 123
         | 
| 871 | 
            +
                },
         | 
| 872 | 
            +
                {
         | 
| 873 | 
            +
                  "epoch": 0.505091649694501,
         | 
| 874 | 
            +
                  "grad_norm": 2.7804417610168457,
         | 
| 875 | 
            +
                  "learning_rate": 9.999979682219186e-07,
         | 
| 876 | 
            +
                  "loss": 0.3680119812488556,
         | 
| 877 | 
            +
                  "step": 124
         | 
| 878 | 
            +
                },
         | 
| 879 | 
            +
                {
         | 
| 880 | 
            +
                  "epoch": 0.5091649694501018,
         | 
| 881 | 
            +
                  "grad_norm": 2.5910799503326416,
         | 
| 882 | 
            +
                  "learning_rate": 9.999918729041868e-07,
         | 
| 883 | 
            +
                  "loss": 0.33467385172843933,
         | 
| 884 | 
            +
                  "step": 125
         | 
| 885 | 
            +
                },
         | 
| 886 | 
            +
                {
         | 
| 887 | 
            +
                  "epoch": 0.5132382892057027,
         | 
| 888 | 
            +
                  "grad_norm": 2.8417587280273438,
         | 
| 889 | 
            +
                  "learning_rate": 9.999817140963419e-07,
         | 
| 890 | 
            +
                  "loss": 0.35100705921649933,
         | 
| 891 | 
            +
                  "step": 126
         | 
| 892 | 
            +
                },
         | 
| 893 | 
            +
                {
         | 
| 894 | 
            +
                  "epoch": 0.5173116089613035,
         | 
| 895 | 
            +
                  "grad_norm": 2.905728340148926,
         | 
| 896 | 
            +
                  "learning_rate": 9.999674918809457e-07,
         | 
| 897 | 
            +
                  "loss": 0.32811686396598816,
         | 
| 898 | 
            +
                  "step": 127
         | 
| 899 | 
            +
                },
         | 
| 900 | 
            +
                {
         | 
| 901 | 
            +
                  "epoch": 0.5213849287169042,
         | 
| 902 | 
            +
                  "grad_norm": 2.5878095626831055,
         | 
| 903 | 
            +
                  "learning_rate": 9.99949206373584e-07,
         | 
| 904 | 
            +
                  "loss": 0.32490645349025726,
         | 
| 905 | 
            +
                  "step": 128
         | 
| 906 | 
            +
                },
         | 
| 907 | 
            +
                {
         | 
| 908 | 
            +
                  "epoch": 0.5254582484725051,
         | 
| 909 | 
            +
                  "grad_norm": 2.9762229919433594,
         | 
| 910 | 
            +
                  "learning_rate": 9.999268577228648e-07,
         | 
| 911 | 
            +
                  "loss": 0.3934018760919571,
         | 
| 912 | 
            +
                  "step": 129
         | 
| 913 | 
            +
                },
         | 
| 914 | 
            +
                {
         | 
| 915 | 
            +
                  "epoch": 0.5295315682281059,
         | 
| 916 | 
            +
                  "grad_norm": 2.792989492416382,
         | 
| 917 | 
            +
                  "learning_rate": 9.99900446110418e-07,
         | 
| 918 | 
            +
                  "loss": 0.3315049111843109,
         | 
| 919 | 
            +
                  "step": 130
         | 
| 920 | 
            +
                },
         | 
| 921 | 
            +
                {
         | 
| 922 | 
            +
                  "epoch": 0.5336048879837068,
         | 
| 923 | 
            +
                  "grad_norm": 2.6891062259674072,
         | 
| 924 | 
            +
                  "learning_rate": 9.998699717508945e-07,
         | 
| 925 | 
            +
                  "loss": 0.3097301423549652,
         | 
| 926 | 
            +
                  "step": 131
         | 
| 927 | 
            +
                },
         | 
| 928 | 
            +
                {
         | 
| 929 | 
            +
                  "epoch": 0.5376782077393075,
         | 
| 930 | 
            +
                  "grad_norm": 2.92191481590271,
         | 
| 931 | 
            +
                  "learning_rate": 9.99835434891962e-07,
         | 
| 932 | 
            +
                  "loss": 0.34749817848205566,
         | 
| 933 | 
            +
                  "step": 132
         | 
| 934 | 
            +
                },
         | 
| 935 | 
            +
                {
         | 
| 936 | 
            +
                  "epoch": 0.5417515274949084,
         | 
| 937 | 
            +
                  "grad_norm": 2.980543851852417,
         | 
| 938 | 
            +
                  "learning_rate": 9.99796835814306e-07,
         | 
| 939 | 
            +
                  "loss": 0.3367327153682709,
         | 
| 940 | 
            +
                  "step": 133
         | 
| 941 | 
            +
                },
         | 
| 942 | 
            +
                {
         | 
| 943 | 
            +
                  "epoch": 0.5458248472505092,
         | 
| 944 | 
            +
                  "grad_norm": 2.50433611869812,
         | 
| 945 | 
            +
                  "learning_rate": 9.99754174831625e-07,
         | 
| 946 | 
            +
                  "loss": 0.3090934008359909,
         | 
| 947 | 
            +
                  "step": 134
         | 
| 948 | 
            +
                },
         | 
| 949 | 
            +
                {
         | 
| 950 | 
            +
                  "epoch": 0.5498981670061099,
         | 
| 951 | 
            +
                  "grad_norm": 2.869647979736328,
         | 
| 952 | 
            +
                  "learning_rate": 9.9970745229063e-07,
         | 
| 953 | 
            +
                  "loss": 0.35603591799736023,
         | 
| 954 | 
            +
                  "step": 135
         | 
| 955 | 
            +
                },
         | 
| 956 | 
            +
                {
         | 
| 957 | 
            +
                  "epoch": 0.5539714867617108,
         | 
| 958 | 
            +
                  "grad_norm": 2.6435837745666504,
         | 
| 959 | 
            +
                  "learning_rate": 9.9965666857104e-07,
         | 
| 960 | 
            +
                  "loss": 0.3288918733596802,
         | 
| 961 | 
            +
                  "step": 136
         | 
| 962 | 
            +
                },
         | 
| 963 | 
            +
                {
         | 
| 964 | 
            +
                  "epoch": 0.5580448065173116,
         | 
| 965 | 
            +
                  "grad_norm": 2.7970142364501953,
         | 
| 966 | 
            +
                  "learning_rate": 9.996018240855806e-07,
         | 
| 967 | 
            +
                  "loss": 0.3878723680973053,
         | 
| 968 | 
            +
                  "step": 137
         | 
| 969 | 
            +
                },
         | 
| 970 | 
            +
                {
         | 
| 971 | 
            +
                  "epoch": 0.5621181262729125,
         | 
| 972 | 
            +
                  "grad_norm": 2.593043327331543,
         | 
| 973 | 
            +
                  "learning_rate": 9.995429192799788e-07,
         | 
| 974 | 
            +
                  "loss": 0.3534126281738281,
         | 
| 975 | 
            +
                  "step": 138
         | 
| 976 | 
            +
                },
         | 
| 977 | 
            +
                {
         | 
| 978 | 
            +
                  "epoch": 0.5661914460285132,
         | 
| 979 | 
            +
                  "grad_norm": 2.8867013454437256,
         | 
| 980 | 
            +
                  "learning_rate": 9.994799546329602e-07,
         | 
| 981 | 
            +
                  "loss": 0.38061630725860596,
         | 
| 982 | 
            +
                  "step": 139
         | 
| 983 | 
            +
                },
         | 
| 984 | 
            +
                {
         | 
| 985 | 
            +
                  "epoch": 0.570264765784114,
         | 
| 986 | 
            +
                  "grad_norm": 2.589017152786255,
         | 
| 987 | 
            +
                  "learning_rate": 9.994129306562458e-07,
         | 
| 988 | 
            +
                  "loss": 0.37725748121738434,
         | 
| 989 | 
            +
                  "step": 140
         | 
| 990 | 
            +
                },
         | 
| 991 | 
            +
                {
         | 
| 992 | 
            +
                  "epoch": 0.5743380855397149,
         | 
| 993 | 
            +
                  "grad_norm": 2.369696617126465,
         | 
| 994 | 
            +
                  "learning_rate": 9.993418478945472e-07,
         | 
| 995 | 
            +
                  "loss": 0.32034583389759064,
         | 
| 996 | 
            +
                  "step": 141
         | 
| 997 | 
            +
                },
         | 
| 998 | 
            +
                {
         | 
| 999 | 
            +
                  "epoch": 0.5784114052953157,
         | 
| 1000 | 
            +
                  "grad_norm": 2.6410069465637207,
         | 
| 1001 | 
            +
                  "learning_rate": 9.992667069255618e-07,
         | 
| 1002 | 
            +
                  "loss": 0.36017628014087677,
         | 
| 1003 | 
            +
                  "step": 142
         | 
| 1004 | 
            +
                },
         | 
| 1005 | 
            +
                {
         | 
| 1006 | 
            +
                  "epoch": 0.5824847250509165,
         | 
| 1007 | 
            +
                  "grad_norm": 2.597259283065796,
         | 
| 1008 | 
            +
                  "learning_rate": 9.991875083599688e-07,
         | 
| 1009 | 
            +
                  "loss": 0.32577911019325256,
         | 
| 1010 | 
            +
                  "step": 143
         | 
| 1011 | 
            +
                },
         | 
| 1012 | 
            +
                {
         | 
| 1013 | 
            +
                  "epoch": 0.5865580448065173,
         | 
| 1014 | 
            +
                  "grad_norm": 2.761859655380249,
         | 
| 1015 | 
            +
                  "learning_rate": 9.991042528414237e-07,
         | 
| 1016 | 
            +
                  "loss": 0.33353830873966217,
         | 
| 1017 | 
            +
                  "step": 144
         | 
| 1018 | 
            +
                },
         | 
| 1019 | 
            +
                {
         | 
| 1020 | 
            +
                  "epoch": 0.5906313645621182,
         | 
| 1021 | 
            +
                  "grad_norm": 2.7634713649749756,
         | 
| 1022 | 
            +
                  "learning_rate": 9.990169410465536e-07,
         | 
| 1023 | 
            +
                  "loss": 0.33604632318019867,
         | 
| 1024 | 
            +
                  "step": 145
         | 
| 1025 | 
            +
                },
         | 
| 1026 | 
            +
                {
         | 
| 1027 | 
            +
                  "epoch": 0.594704684317719,
         | 
| 1028 | 
            +
                  "grad_norm": 2.820897340774536,
         | 
| 1029 | 
            +
                  "learning_rate": 9.98925573684951e-07,
         | 
| 1030 | 
            +
                  "loss": 0.3069554716348648,
         | 
| 1031 | 
            +
                  "step": 146
         | 
| 1032 | 
            +
                },
         | 
| 1033 | 
            +
                {
         | 
| 1034 | 
            +
                  "epoch": 0.5987780040733197,
         | 
| 1035 | 
            +
                  "grad_norm": 2.856700897216797,
         | 
| 1036 | 
            +
                  "learning_rate": 9.98830151499169e-07,
         | 
| 1037 | 
            +
                  "loss": 0.33896636962890625,
         | 
| 1038 | 
            +
                  "step": 147
         | 
| 1039 | 
            +
                },
         | 
| 1040 | 
            +
                {
         | 
| 1041 | 
            +
                  "epoch": 0.6028513238289206,
         | 
| 1042 | 
            +
                  "grad_norm": 2.9203782081604004,
         | 
| 1043 | 
            +
                  "learning_rate": 9.987306752647142e-07,
         | 
| 1044 | 
            +
                  "loss": 0.35070909559726715,
         | 
| 1045 | 
            +
                  "step": 148
         | 
| 1046 | 
            +
                },
         | 
| 1047 | 
            +
                {
         | 
| 1048 | 
            +
                  "epoch": 0.6069246435845214,
         | 
| 1049 | 
            +
                  "grad_norm": 2.679352283477783,
         | 
| 1050 | 
            +
                  "learning_rate": 9.986271457900414e-07,
         | 
| 1051 | 
            +
                  "loss": 0.3325359970331192,
         | 
| 1052 | 
            +
                  "step": 149
         | 
| 1053 | 
            +
                },
         | 
| 1054 | 
            +
                {
         | 
| 1055 | 
            +
                  "epoch": 0.6109979633401222,
         | 
| 1056 | 
            +
                  "grad_norm": 2.4953606128692627,
         | 
| 1057 | 
            +
                  "learning_rate": 9.98519563916546e-07,
         | 
| 1058 | 
            +
                  "loss": 0.32330869138240814,
         | 
| 1059 | 
            +
                  "step": 150
         | 
| 1060 | 
            +
                },
         | 
| 1061 | 
            +
                {
         | 
| 1062 | 
            +
                  "epoch": 0.615071283095723,
         | 
| 1063 | 
            +
                  "grad_norm": 2.618744134902954,
         | 
| 1064 | 
            +
                  "learning_rate": 9.98407930518558e-07,
         | 
| 1065 | 
            +
                  "loss": 0.33912393450737,
         | 
| 1066 | 
            +
                  "step": 151
         | 
| 1067 | 
            +
                },
         | 
| 1068 | 
            +
                {
         | 
| 1069 | 
            +
                  "epoch": 0.6191446028513238,
         | 
| 1070 | 
            +
                  "grad_norm": 2.6512296199798584,
         | 
| 1071 | 
            +
                  "learning_rate": 9.982922465033348e-07,
         | 
| 1072 | 
            +
                  "loss": 0.3045920431613922,
         | 
| 1073 | 
            +
                  "step": 152
         | 
| 1074 | 
            +
                },
         | 
| 1075 | 
            +
                {
         | 
| 1076 | 
            +
                  "epoch": 0.6232179226069247,
         | 
| 1077 | 
            +
                  "grad_norm": 2.7606050968170166,
         | 
| 1078 | 
            +
                  "learning_rate": 9.981725128110532e-07,
         | 
| 1079 | 
            +
                  "loss": 0.32916732132434845,
         | 
| 1080 | 
            +
                  "step": 153
         | 
| 1081 | 
            +
                },
         | 
| 1082 | 
            +
                {
         | 
| 1083 | 
            +
                  "epoch": 0.6272912423625254,
         | 
| 1084 | 
            +
                  "grad_norm": 2.95037841796875,
         | 
| 1085 | 
            +
                  "learning_rate": 9.980487304148024e-07,
         | 
| 1086 | 
            +
                  "loss": 0.36757831275463104,
         | 
| 1087 | 
            +
                  "step": 154
         | 
| 1088 | 
            +
                },
         | 
| 1089 | 
            +
                {
         | 
| 1090 | 
            +
                  "epoch": 0.6313645621181263,
         | 
| 1091 | 
            +
                  "grad_norm": 2.890489339828491,
         | 
| 1092 | 
            +
                  "learning_rate": 9.97920900320576e-07,
         | 
| 1093 | 
            +
                  "loss": 0.36117151379585266,
         | 
| 1094 | 
            +
                  "step": 155
         | 
| 1095 | 
            +
                },
         | 
| 1096 | 
            +
                {
         | 
| 1097 | 
            +
                  "epoch": 0.6354378818737271,
         | 
| 1098 | 
            +
                  "grad_norm": 2.7488858699798584,
         | 
| 1099 | 
            +
                  "learning_rate": 9.97789023567263e-07,
         | 
| 1100 | 
            +
                  "loss": 0.35026322305202484,
         | 
| 1101 | 
            +
                  "step": 156
         | 
| 1102 | 
            +
                },
         | 
| 1103 | 
            +
                {
         | 
| 1104 | 
            +
                  "epoch": 0.639511201629328,
         | 
| 1105 | 
            +
                  "grad_norm": 2.5479671955108643,
         | 
| 1106 | 
            +
                  "learning_rate": 9.976531012266413e-07,
         | 
| 1107 | 
            +
                  "loss": 0.308156818151474,
         | 
| 1108 | 
            +
                  "step": 157
         | 
| 1109 | 
            +
                },
         | 
| 1110 | 
            +
                {
         | 
| 1111 | 
            +
                  "epoch": 0.6435845213849287,
         | 
| 1112 | 
            +
                  "grad_norm": 2.717344045639038,
         | 
| 1113 | 
            +
                  "learning_rate": 9.975131344033664e-07,
         | 
| 1114 | 
            +
                  "loss": 0.29827529191970825,
         | 
| 1115 | 
            +
                  "step": 158
         | 
| 1116 | 
            +
                },
         | 
| 1117 | 
            +
                {
         | 
| 1118 | 
            +
                  "epoch": 0.6476578411405295,
         | 
| 1119 | 
            +
                  "grad_norm": 2.569551467895508,
         | 
| 1120 | 
            +
                  "learning_rate": 9.973691242349648e-07,
         | 
| 1121 | 
            +
                  "loss": 0.3232528269290924,
         | 
| 1122 | 
            +
                  "step": 159
         | 
| 1123 | 
            +
                },
         | 
| 1124 | 
            +
                {
         | 
| 1125 | 
            +
                  "epoch": 0.6517311608961304,
         | 
| 1126 | 
            +
                  "grad_norm": 3.0013420581817627,
         | 
| 1127 | 
            +
                  "learning_rate": 9.972210718918233e-07,
         | 
| 1128 | 
            +
                  "loss": 0.3270832598209381,
         | 
| 1129 | 
            +
                  "step": 160
         | 
| 1130 | 
            +
                },
         | 
| 1131 | 
            +
                {
         | 
| 1132 | 
            +
                  "epoch": 0.6558044806517311,
         | 
| 1133 | 
            +
                  "grad_norm": 2.7339162826538086,
         | 
| 1134 | 
            +
                  "learning_rate": 9.970689785771798e-07,
         | 
| 1135 | 
            +
                  "loss": 0.3668155074119568,
         | 
| 1136 | 
            +
                  "step": 161
         | 
| 1137 | 
            +
                },
         | 
| 1138 | 
            +
                {
         | 
| 1139 | 
            +
                  "epoch": 0.659877800407332,
         | 
| 1140 | 
            +
                  "grad_norm": 2.6689724922180176,
         | 
| 1141 | 
            +
                  "learning_rate": 9.969128455271137e-07,
         | 
| 1142 | 
            +
                  "loss": 0.32853490114212036,
         | 
| 1143 | 
            +
                  "step": 162
         | 
| 1144 | 
            +
                },
         | 
| 1145 | 
            +
                {
         | 
| 1146 | 
            +
                  "epoch": 0.6639511201629328,
         | 
| 1147 | 
            +
                  "grad_norm": 3.042081117630005,
         | 
| 1148 | 
            +
                  "learning_rate": 9.967526740105358e-07,
         | 
| 1149 | 
            +
                  "loss": 0.3487651199102402,
         | 
| 1150 | 
            +
                  "step": 163
         | 
| 1151 | 
            +
                },
         | 
| 1152 | 
            +
                {
         | 
| 1153 | 
            +
                  "epoch": 0.6680244399185336,
         | 
| 1154 | 
            +
                  "grad_norm": 2.4641284942626953,
         | 
| 1155 | 
            +
                  "learning_rate": 9.965884653291783e-07,
         | 
| 1156 | 
            +
                  "loss": 0.35704147815704346,
         | 
| 1157 | 
            +
                  "step": 164
         | 
| 1158 | 
            +
                },
         | 
| 1159 | 
            +
                {
         | 
| 1160 | 
            +
                  "epoch": 0.6720977596741344,
         | 
| 1161 | 
            +
                  "grad_norm": 2.6836225986480713,
         | 
| 1162 | 
            +
                  "learning_rate": 9.964202208175833e-07,
         | 
| 1163 | 
            +
                  "loss": 0.33587950468063354,
         | 
| 1164 | 
            +
                  "step": 165
         | 
| 1165 | 
            +
                },
         | 
| 1166 | 
            +
                {
         | 
| 1167 | 
            +
                  "epoch": 0.6761710794297352,
         | 
| 1168 | 
            +
                  "grad_norm": 2.2905988693237305,
         | 
| 1169 | 
            +
                  "learning_rate": 9.962479418430932e-07,
         | 
| 1170 | 
            +
                  "loss": 0.3061918318271637,
         | 
| 1171 | 
            +
                  "step": 166
         | 
| 1172 | 
            +
                },
         | 
| 1173 | 
            +
                {
         | 
| 1174 | 
            +
                  "epoch": 0.6802443991853361,
         | 
| 1175 | 
            +
                  "grad_norm": 2.4772934913635254,
         | 
| 1176 | 
            +
                  "learning_rate": 9.960716298058381e-07,
         | 
| 1177 | 
            +
                  "loss": 0.2896444499492645,
         | 
| 1178 | 
            +
                  "step": 167
         | 
| 1179 | 
            +
                },
         | 
| 1180 | 
            +
                {
         | 
| 1181 | 
            +
                  "epoch": 0.6843177189409368,
         | 
| 1182 | 
            +
                  "grad_norm": 2.6987321376800537,
         | 
| 1183 | 
            +
                  "learning_rate": 9.958912861387258e-07,
         | 
| 1184 | 
            +
                  "loss": 0.3374595195055008,
         | 
| 1185 | 
            +
                  "step": 168
         | 
| 1186 | 
            +
                },
         | 
| 1187 | 
            +
                {
         | 
| 1188 | 
            +
                  "epoch": 0.6883910386965377,
         | 
| 1189 | 
            +
                  "grad_norm": 2.6165449619293213,
         | 
| 1190 | 
            +
                  "learning_rate": 9.9570691230743e-07,
         | 
| 1191 | 
            +
                  "loss": 0.33027225732803345,
         | 
| 1192 | 
            +
                  "step": 169
         | 
| 1193 | 
            +
                },
         | 
| 1194 | 
            +
                {
         | 
| 1195 | 
            +
                  "epoch": 0.6924643584521385,
         | 
| 1196 | 
            +
                  "grad_norm": 3.1326680183410645,
         | 
| 1197 | 
            +
                  "learning_rate": 9.955185098103771e-07,
         | 
| 1198 | 
            +
                  "loss": 0.3138381540775299,
         | 
| 1199 | 
            +
                  "step": 170
         | 
| 1200 | 
            +
                },
         | 
| 1201 | 
            +
                {
         | 
| 1202 | 
            +
                  "epoch": 0.6965376782077393,
         | 
| 1203 | 
            +
                  "grad_norm": 2.5313732624053955,
         | 
| 1204 | 
            +
                  "learning_rate": 9.953260801787356e-07,
         | 
| 1205 | 
            +
                  "loss": 0.31824737787246704,
         | 
| 1206 | 
            +
                  "step": 171
         | 
| 1207 | 
            +
                },
         | 
| 1208 | 
            +
                {
         | 
| 1209 | 
            +
                  "epoch": 0.7006109979633401,
         | 
| 1210 | 
            +
                  "grad_norm": 2.529325008392334,
         | 
| 1211 | 
            +
                  "learning_rate": 9.951296249764025e-07,
         | 
| 1212 | 
            +
                  "loss": 0.298155277967453,
         | 
| 1213 | 
            +
                  "step": 172
         | 
| 1214 | 
            +
                },
         | 
| 1215 | 
            +
                {
         | 
| 1216 | 
            +
                  "epoch": 0.7046843177189409,
         | 
| 1217 | 
            +
                  "grad_norm": 2.6821744441986084,
         | 
| 1218 | 
            +
                  "learning_rate": 9.949291457999916e-07,
         | 
| 1219 | 
            +
                  "loss": 0.33296874165534973,
         | 
| 1220 | 
            +
                  "step": 173
         | 
| 1221 | 
            +
                },
         | 
| 1222 | 
            +
                {
         | 
| 1223 | 
            +
                  "epoch": 0.7087576374745418,
         | 
| 1224 | 
            +
                  "grad_norm": 2.588157892227173,
         | 
| 1225 | 
            +
                  "learning_rate": 9.947246442788193e-07,
         | 
| 1226 | 
            +
                  "loss": 0.31226691603660583,
         | 
| 1227 | 
            +
                  "step": 174
         | 
| 1228 | 
            +
                },
         | 
| 1229 | 
            +
                {
         | 
| 1230 | 
            +
                  "epoch": 0.7128309572301426,
         | 
| 1231 | 
            +
                  "grad_norm": 2.7822420597076416,
         | 
| 1232 | 
            +
                  "learning_rate": 9.945161220748927e-07,
         | 
| 1233 | 
            +
                  "loss": 0.322743222117424,
         | 
| 1234 | 
            +
                  "step": 175
         | 
| 1235 | 
            +
                },
         | 
| 1236 | 
            +
                {
         | 
| 1237 | 
            +
                  "epoch": 0.7169042769857433,
         | 
| 1238 | 
            +
                  "grad_norm": 2.379702091217041,
         | 
| 1239 | 
            +
                  "learning_rate": 9.943035808828953e-07,
         | 
| 1240 | 
            +
                  "loss": 0.3056500107049942,
         | 
| 1241 | 
            +
                  "step": 176
         | 
| 1242 | 
            +
                },
         | 
| 1243 | 
            +
                {
         | 
| 1244 | 
            +
                  "epoch": 0.7209775967413442,
         | 
| 1245 | 
            +
                  "grad_norm": 2.4450721740722656,
         | 
| 1246 | 
            +
                  "learning_rate": 9.94087022430173e-07,
         | 
| 1247 | 
            +
                  "loss": 0.3037564754486084,
         | 
| 1248 | 
            +
                  "step": 177
         | 
| 1249 | 
            +
                },
         | 
| 1250 | 
            +
                {
         | 
| 1251 | 
            +
                  "epoch": 0.725050916496945,
         | 
| 1252 | 
            +
                  "grad_norm": 2.5885887145996094,
         | 
| 1253 | 
            +
                  "learning_rate": 9.938664484767205e-07,
         | 
| 1254 | 
            +
                  "loss": 0.327587828040123,
         | 
| 1255 | 
            +
                  "step": 178
         | 
| 1256 | 
            +
                },
         | 
| 1257 | 
            +
                {
         | 
| 1258 | 
            +
                  "epoch": 0.7291242362525459,
         | 
| 1259 | 
            +
                  "grad_norm": 2.613290309906006,
         | 
| 1260 | 
            +
                  "learning_rate": 9.936418608151675e-07,
         | 
| 1261 | 
            +
                  "loss": 0.33323927223682404,
         | 
| 1262 | 
            +
                  "step": 179
         | 
| 1263 | 
            +
                },
         | 
| 1264 | 
            +
                {
         | 
| 1265 | 
            +
                  "epoch": 0.7331975560081466,
         | 
| 1266 | 
            +
                  "grad_norm": 2.6541707515716553,
         | 
| 1267 | 
            +
                  "learning_rate": 9.93413261270763e-07,
         | 
| 1268 | 
            +
                  "loss": 0.3316569924354553,
         | 
| 1269 | 
            +
                  "step": 180
         | 
| 1270 | 
            +
                },
         | 
| 1271 | 
            +
                {
         | 
| 1272 | 
            +
                  "epoch": 0.7372708757637475,
         | 
| 1273 | 
            +
                  "grad_norm": 2.646383047103882,
         | 
| 1274 | 
            +
                  "learning_rate": 9.931806517013612e-07,
         | 
| 1275 | 
            +
                  "loss": 0.35486292839050293,
         | 
| 1276 | 
            +
                  "step": 181
         | 
| 1277 | 
            +
                },
         | 
| 1278 | 
            +
                {
         | 
| 1279 | 
            +
                  "epoch": 0.7413441955193483,
         | 
| 1280 | 
            +
                  "grad_norm": 2.5270328521728516,
         | 
| 1281 | 
            +
                  "learning_rate": 9.92944033997406e-07,
         | 
| 1282 | 
            +
                  "loss": 0.3157142102718353,
         | 
| 1283 | 
            +
                  "step": 182
         | 
| 1284 | 
            +
                },
         | 
| 1285 | 
            +
                {
         | 
| 1286 | 
            +
                  "epoch": 0.745417515274949,
         | 
| 1287 | 
            +
                  "grad_norm": 2.5851869583129883,
         | 
| 1288 | 
            +
                  "learning_rate": 9.927034100819163e-07,
         | 
| 1289 | 
            +
                  "loss": 0.3013855814933777,
         | 
| 1290 | 
            +
                  "step": 183
         | 
| 1291 | 
            +
                },
         | 
| 1292 | 
            +
                {
         | 
| 1293 | 
            +
                  "epoch": 0.7494908350305499,
         | 
| 1294 | 
            +
                  "grad_norm": 2.75219988822937,
         | 
| 1295 | 
            +
                  "learning_rate": 9.924587819104695e-07,
         | 
| 1296 | 
            +
                  "loss": 0.3420049250125885,
         | 
| 1297 | 
            +
                  "step": 184
         | 
| 1298 | 
            +
                },
         | 
| 1299 | 
            +
                {
         | 
| 1300 | 
            +
                  "epoch": 0.7535641547861507,
         | 
| 1301 | 
            +
                  "grad_norm": 2.436596632003784,
         | 
| 1302 | 
            +
                  "learning_rate": 9.922101514711865e-07,
         | 
| 1303 | 
            +
                  "loss": 0.3062688261270523,
         | 
| 1304 | 
            +
                  "step": 185
         | 
| 1305 | 
            +
                },
         | 
| 1306 | 
            +
                {
         | 
| 1307 | 
            +
                  "epoch": 0.7576374745417516,
         | 
| 1308 | 
            +
                  "grad_norm": 2.9479236602783203,
         | 
| 1309 | 
            +
                  "learning_rate": 9.919575207847145e-07,
         | 
| 1310 | 
            +
                  "loss": 0.31793762743473053,
         | 
| 1311 | 
            +
                  "step": 186
         | 
| 1312 | 
            +
                },
         | 
| 1313 | 
            +
                {
         | 
| 1314 | 
            +
                  "epoch": 0.7617107942973523,
         | 
| 1315 | 
            +
                  "grad_norm": 2.5482208728790283,
         | 
| 1316 | 
            +
                  "learning_rate": 9.917008919042116e-07,
         | 
| 1317 | 
            +
                  "loss": 0.3306496888399124,
         | 
| 1318 | 
            +
                  "step": 187
         | 
| 1319 | 
            +
                },
         | 
| 1320 | 
            +
                {
         | 
| 1321 | 
            +
                  "epoch": 0.7657841140529531,
         | 
| 1322 | 
            +
                  "grad_norm": 2.609839677810669,
         | 
| 1323 | 
            +
                  "learning_rate": 9.914402669153295e-07,
         | 
| 1324 | 
            +
                  "loss": 0.29324449598789215,
         | 
| 1325 | 
            +
                  "step": 188
         | 
| 1326 | 
            +
                },
         | 
| 1327 | 
            +
                {
         | 
| 1328 | 
            +
                  "epoch": 0.769857433808554,
         | 
| 1329 | 
            +
                  "grad_norm": 2.5740039348602295,
         | 
| 1330 | 
            +
                  "learning_rate": 9.91175647936197e-07,
         | 
| 1331 | 
            +
                  "loss": 0.3193310797214508,
         | 
| 1332 | 
            +
                  "step": 189
         | 
| 1333 | 
            +
                },
         | 
| 1334 | 
            +
                {
         | 
| 1335 | 
            +
                  "epoch": 0.7739307535641547,
         | 
| 1336 | 
            +
                  "grad_norm": 2.3878629207611084,
         | 
| 1337 | 
            +
                  "learning_rate": 9.909070371174019e-07,
         | 
| 1338 | 
            +
                  "loss": 0.3040658235549927,
         | 
| 1339 | 
            +
                  "step": 190
         | 
| 1340 | 
            +
                },
         | 
| 1341 | 
            +
                {
         | 
| 1342 | 
            +
                  "epoch": 0.7780040733197556,
         | 
| 1343 | 
            +
                  "grad_norm": 2.755152463912964,
         | 
| 1344 | 
            +
                  "learning_rate": 9.906344366419746e-07,
         | 
| 1345 | 
            +
                  "loss": 0.33930477499961853,
         | 
| 1346 | 
            +
                  "step": 191
         | 
| 1347 | 
            +
                },
         | 
| 1348 | 
            +
                {
         | 
| 1349 | 
            +
                  "epoch": 0.7820773930753564,
         | 
| 1350 | 
            +
                  "grad_norm": 2.58367657661438,
         | 
| 1351 | 
            +
                  "learning_rate": 9.9035784872537e-07,
         | 
| 1352 | 
            +
                  "loss": 0.3244568109512329,
         | 
| 1353 | 
            +
                  "step": 192
         | 
| 1354 | 
            +
                },
         | 
| 1355 | 
            +
                {
         | 
| 1356 | 
            +
                  "epoch": 0.7861507128309573,
         | 
| 1357 | 
            +
                  "grad_norm": 2.350712537765503,
         | 
| 1358 | 
            +
                  "learning_rate": 9.90077275615449e-07,
         | 
| 1359 | 
            +
                  "loss": 0.2779058516025543,
         | 
| 1360 | 
            +
                  "step": 193
         | 
| 1361 | 
            +
                },
         | 
| 1362 | 
            +
                {
         | 
| 1363 | 
            +
                  "epoch": 0.790224032586558,
         | 
| 1364 | 
            +
                  "grad_norm": 2.7418465614318848,
         | 
| 1365 | 
            +
                  "learning_rate": 9.897927195924608e-07,
         | 
| 1366 | 
            +
                  "loss": 0.32641272246837616,
         | 
| 1367 | 
            +
                  "step": 194
         | 
| 1368 | 
            +
                },
         | 
| 1369 | 
            +
                {
         | 
| 1370 | 
            +
                  "epoch": 0.7942973523421588,
         | 
| 1371 | 
            +
                  "grad_norm": 2.516510009765625,
         | 
| 1372 | 
            +
                  "learning_rate": 9.895041829690238e-07,
         | 
| 1373 | 
            +
                  "loss": 0.3083319664001465,
         | 
| 1374 | 
            +
                  "step": 195
         | 
| 1375 | 
            +
                },
         | 
| 1376 | 
            +
                {
         | 
| 1377 | 
            +
                  "epoch": 0.7983706720977597,
         | 
| 1378 | 
            +
                  "grad_norm": 2.7772316932678223,
         | 
| 1379 | 
            +
                  "learning_rate": 9.892116680901084e-07,
         | 
| 1380 | 
            +
                  "loss": 0.30357757210731506,
         | 
| 1381 | 
            +
                  "step": 196
         | 
| 1382 | 
            +
                },
         | 
| 1383 | 
            +
                {
         | 
| 1384 | 
            +
                  "epoch": 0.8024439918533605,
         | 
| 1385 | 
            +
                  "grad_norm": 2.5389041900634766,
         | 
| 1386 | 
            +
                  "learning_rate": 9.88915177333015e-07,
         | 
| 1387 | 
            +
                  "loss": 0.30694054067134857,
         | 
| 1388 | 
            +
                  "step": 197
         | 
| 1389 | 
            +
                },
         | 
| 1390 | 
            +
                {
         | 
| 1391 | 
            +
                  "epoch": 0.8065173116089613,
         | 
| 1392 | 
            +
                  "grad_norm": 2.7129383087158203,
         | 
| 1393 | 
            +
                  "learning_rate": 9.886147131073579e-07,
         | 
| 1394 | 
            +
                  "loss": 0.3402569591999054,
         | 
| 1395 | 
            +
                  "step": 198
         | 
| 1396 | 
            +
                },
         | 
| 1397 | 
            +
                {
         | 
| 1398 | 
            +
                  "epoch": 0.8105906313645621,
         | 
| 1399 | 
            +
                  "grad_norm": 2.654186248779297,
         | 
| 1400 | 
            +
                  "learning_rate": 9.883102778550434e-07,
         | 
| 1401 | 
            +
                  "loss": 0.3343619704246521,
         | 
| 1402 | 
            +
                  "step": 199
         | 
| 1403 | 
            +
                },
         | 
| 1404 | 
            +
                {
         | 
| 1405 | 
            +
                  "epoch": 0.814663951120163,
         | 
| 1406 | 
            +
                  "grad_norm": 2.380168914794922,
         | 
| 1407 | 
            +
                  "learning_rate": 9.880018740502508e-07,
         | 
| 1408 | 
            +
                  "loss": 0.3020651191473007,
         | 
| 1409 | 
            +
                  "step": 200
         | 
| 1410 | 
            +
                },
         | 
| 1411 | 
            +
                {
         | 
| 1412 | 
            +
                  "epoch": 0.8187372708757638,
         | 
| 1413 | 
            +
                  "grad_norm": 2.771951198577881,
         | 
| 1414 | 
            +
                  "learning_rate": 9.876895041994127e-07,
         | 
| 1415 | 
            +
                  "loss": 0.30565840005874634,
         | 
| 1416 | 
            +
                  "step": 201
         | 
| 1417 | 
            +
                },
         | 
| 1418 | 
            +
                {
         | 
| 1419 | 
            +
                  "epoch": 0.8228105906313645,
         | 
| 1420 | 
            +
                  "grad_norm": 2.4966540336608887,
         | 
| 1421 | 
            +
                  "learning_rate": 9.873731708411939e-07,
         | 
| 1422 | 
            +
                  "loss": 0.3085058331489563,
         | 
| 1423 | 
            +
                  "step": 202
         | 
| 1424 | 
            +
                },
         | 
| 1425 | 
            +
                {
         | 
| 1426 | 
            +
                  "epoch": 0.8268839103869654,
         | 
| 1427 | 
            +
                  "grad_norm": 2.5919551849365234,
         | 
| 1428 | 
            +
                  "learning_rate": 9.870528765464711e-07,
         | 
| 1429 | 
            +
                  "loss": 0.34540820121765137,
         | 
| 1430 | 
            +
                  "step": 203
         | 
| 1431 | 
            +
                },
         | 
| 1432 | 
            +
                {
         | 
| 1433 | 
            +
                  "epoch": 0.8309572301425662,
         | 
| 1434 | 
            +
                  "grad_norm": 3.0668885707855225,
         | 
| 1435 | 
            +
                  "learning_rate": 9.867286239183122e-07,
         | 
| 1436 | 
            +
                  "loss": 0.3307037353515625,
         | 
| 1437 | 
            +
                  "step": 204
         | 
| 1438 | 
            +
                },
         | 
| 1439 | 
            +
                {
         | 
| 1440 | 
            +
                  "epoch": 0.835030549898167,
         | 
| 1441 | 
            +
                  "grad_norm": 2.4281554222106934,
         | 
| 1442 | 
            +
                  "learning_rate": 9.864004155919544e-07,
         | 
| 1443 | 
            +
                  "loss": 0.28929875791072845,
         | 
| 1444 | 
            +
                  "step": 205
         | 
| 1445 | 
            +
                },
         | 
| 1446 | 
            +
                {
         | 
| 1447 | 
            +
                  "epoch": 0.8391038696537678,
         | 
| 1448 | 
            +
                  "grad_norm": 2.5561623573303223,
         | 
| 1449 | 
            +
                  "learning_rate": 9.860682542347838e-07,
         | 
| 1450 | 
            +
                  "loss": 0.3272414803504944,
         | 
| 1451 | 
            +
                  "step": 206
         | 
| 1452 | 
            +
                },
         | 
| 1453 | 
            +
                {
         | 
| 1454 | 
            +
                  "epoch": 0.8431771894093686,
         | 
| 1455 | 
            +
                  "grad_norm": 2.824591636657715,
         | 
| 1456 | 
            +
                  "learning_rate": 9.85732142546313e-07,
         | 
| 1457 | 
            +
                  "loss": 0.3192295432090759,
         | 
| 1458 | 
            +
                  "step": 207
         | 
| 1459 | 
            +
                },
         | 
| 1460 | 
            +
                {
         | 
| 1461 | 
            +
                  "epoch": 0.8472505091649695,
         | 
| 1462 | 
            +
                  "grad_norm": 2.643718719482422,
         | 
| 1463 | 
            +
                  "learning_rate": 9.853920832581597e-07,
         | 
| 1464 | 
            +
                  "loss": 0.31284041702747345,
         | 
| 1465 | 
            +
                  "step": 208
         | 
| 1466 | 
            +
                },
         | 
| 1467 | 
            +
                {
         | 
| 1468 | 
            +
                  "epoch": 0.8513238289205702,
         | 
| 1469 | 
            +
                  "grad_norm": 2.6777195930480957,
         | 
| 1470 | 
            +
                  "learning_rate": 9.850480791340236e-07,
         | 
| 1471 | 
            +
                  "loss": 0.3136574327945709,
         | 
| 1472 | 
            +
                  "step": 209
         | 
| 1473 | 
            +
                },
         | 
| 1474 | 
            +
                {
         | 
| 1475 | 
            +
                  "epoch": 0.8553971486761711,
         | 
| 1476 | 
            +
                  "grad_norm": 2.5229766368865967,
         | 
| 1477 | 
            +
                  "learning_rate": 9.847001329696652e-07,
         | 
| 1478 | 
            +
                  "loss": 0.3047819435596466,
         | 
| 1479 | 
            +
                  "step": 210
         | 
| 1480 | 
            +
                },
         | 
| 1481 | 
            +
                {
         | 
| 1482 | 
            +
                  "epoch": 0.8594704684317719,
         | 
| 1483 | 
            +
                  "grad_norm": 2.659447431564331,
         | 
| 1484 | 
            +
                  "learning_rate": 9.843482475928818e-07,
         | 
| 1485 | 
            +
                  "loss": 0.3642407953739166,
         | 
| 1486 | 
            +
                  "step": 211
         | 
| 1487 | 
            +
                },
         | 
| 1488 | 
            +
                {
         | 
| 1489 | 
            +
                  "epoch": 0.8635437881873728,
         | 
| 1490 | 
            +
                  "grad_norm": 2.697049379348755,
         | 
| 1491 | 
            +
                  "learning_rate": 9.839924258634853e-07,
         | 
| 1492 | 
            +
                  "loss": 0.3134022653102875,
         | 
| 1493 | 
            +
                  "step": 212
         | 
| 1494 | 
            +
                },
         | 
| 1495 | 
            +
                {
         | 
| 1496 | 
            +
                  "epoch": 0.8676171079429735,
         | 
| 1497 | 
            +
                  "grad_norm": 2.629868745803833,
         | 
| 1498 | 
            +
                  "learning_rate": 9.83632670673279e-07,
         | 
| 1499 | 
            +
                  "loss": 0.306331992149353,
         | 
| 1500 | 
            +
                  "step": 213
         | 
| 1501 | 
            +
                },
         | 
| 1502 | 
            +
                {
         | 
| 1503 | 
            +
                  "epoch": 0.8716904276985743,
         | 
| 1504 | 
            +
                  "grad_norm": 2.4997003078460693,
         | 
| 1505 | 
            +
                  "learning_rate": 9.832689849460339e-07,
         | 
| 1506 | 
            +
                  "loss": 0.3142865002155304,
         | 
| 1507 | 
            +
                  "step": 214
         | 
| 1508 | 
            +
                },
         | 
| 1509 | 
            +
                {
         | 
| 1510 | 
            +
                  "epoch": 0.8757637474541752,
         | 
| 1511 | 
            +
                  "grad_norm": 2.826869010925293,
         | 
| 1512 | 
            +
                  "learning_rate": 9.829013716374647e-07,
         | 
| 1513 | 
            +
                  "loss": 0.2904099076986313,
         | 
| 1514 | 
            +
                  "step": 215
         | 
| 1515 | 
            +
                },
         | 
| 1516 | 
            +
                {
         | 
| 1517 | 
            +
                  "epoch": 0.879837067209776,
         | 
| 1518 | 
            +
                  "grad_norm": 2.6697499752044678,
         | 
| 1519 | 
            +
                  "learning_rate": 9.825298337352058e-07,
         | 
| 1520 | 
            +
                  "loss": 0.29838354885578156,
         | 
| 1521 | 
            +
                  "step": 216
         | 
| 1522 | 
            +
                },
         | 
| 1523 | 
            +
                {
         | 
| 1524 | 
            +
                  "epoch": 0.8839103869653768,
         | 
| 1525 | 
            +
                  "grad_norm": 2.5330023765563965,
         | 
| 1526 | 
            +
                  "learning_rate": 9.821543742587876e-07,
         | 
| 1527 | 
            +
                  "loss": 0.3052047789096832,
         | 
| 1528 | 
            +
                  "step": 217
         | 
| 1529 | 
            +
                },
         | 
| 1530 | 
            +
                {
         | 
| 1531 | 
            +
                  "epoch": 0.8879837067209776,
         | 
| 1532 | 
            +
                  "grad_norm": 2.806683301925659,
         | 
| 1533 | 
            +
                  "learning_rate": 9.817749962596114e-07,
+      "loss": 0.3121778964996338,
+      "step": 218
+    },
+    {
+      "epoch": 0.8920570264765784,
+      "grad_norm": 2.718122720718384,
+      "learning_rate": 9.81391702820925e-07,
+      "loss": 0.32955022156238556,
+      "step": 219
+    },
+    {
+      "epoch": 0.8961303462321792,
+      "grad_norm": 2.346466541290283,
+      "learning_rate": 9.81004497057797e-07,
+      "loss": 0.291049063205719,
+      "step": 220
+    },
+    {
+      "epoch": 0.90020366598778,
+      "grad_norm": 2.4048361778259277,
+      "learning_rate": 9.806133821170924e-07,
+      "loss": 0.30249159038066864,
+      "step": 221
+    },
+    {
+      "epoch": 0.9042769857433809,
+      "grad_norm": 2.681546688079834,
+      "learning_rate": 9.80218361177446e-07,
+      "loss": 0.362154021859169,
+      "step": 222
+    },
+    {
+      "epoch": 0.9083503054989817,
+      "grad_norm": 2.792266368865967,
+      "learning_rate": 9.798194374492375e-07,
+      "loss": 0.28344525396823883,
+      "step": 223
+    },
+    {
+      "epoch": 0.9124236252545825,
+      "grad_norm": 2.507050037384033,
+      "learning_rate": 9.794166141745646e-07,
+      "loss": 0.2935172915458679,
+      "step": 224
+    },
+    {
+      "epoch": 0.9164969450101833,
+      "grad_norm": 2.7160379886627197,
+      "learning_rate": 9.790098946272177e-07,
+      "loss": 0.3005199581384659,
+      "step": 225
+    },
+    {
+      "epoch": 0.9205702647657841,
+      "grad_norm": 2.666494131088257,
+      "learning_rate": 9.785992821126518e-07,
+      "loss": 0.30710943043231964,
+      "step": 226
+    },
+    {
+      "epoch": 0.924643584521385,
+      "grad_norm": 2.699313163757324,
+      "learning_rate": 9.781847799679615e-07,
+      "loss": 0.3164513558149338,
+      "step": 227
+    },
+    {
+      "epoch": 0.9287169042769857,
+      "grad_norm": 2.49406099319458,
+      "learning_rate": 9.777663915618517e-07,
+      "loss": 0.3061770647764206,
+      "step": 228
+    },
+    {
+      "epoch": 0.9327902240325866,
+      "grad_norm": 2.552093029022217,
+      "learning_rate": 9.773441202946121e-07,
+      "loss": 0.2973909080028534,
+      "step": 229
+    },
+    {
+      "epoch": 0.9368635437881874,
+      "grad_norm": 2.5773231983184814,
+      "learning_rate": 9.76917969598089e-07,
+      "loss": 0.31120532751083374,
+      "step": 230
+    },
+    {
+      "epoch": 0.9409368635437881,
+      "grad_norm": 2.653515100479126,
+      "learning_rate": 9.76487942935657e-07,
+      "loss": 0.3365926146507263,
+      "step": 231
+    },
+    {
+      "epoch": 0.945010183299389,
+      "grad_norm": 2.670433282852173,
+      "learning_rate": 9.760540438021907e-07,
+      "loss": 0.3196941614151001,
+      "step": 232
+    },
+    {
+      "epoch": 0.9490835030549898,
+      "grad_norm": 2.892035961151123,
+      "learning_rate": 9.756162757240373e-07,
+      "loss": 0.33982205390930176,
+      "step": 233
+    },
+    {
+      "epoch": 0.9531568228105907,
+      "grad_norm": 2.5157856941223145,
+      "learning_rate": 9.751746422589872e-07,
+      "loss": 0.2537951096892357,
+      "step": 234
+    },
+    {
+      "epoch": 0.9572301425661914,
+      "grad_norm": 2.6808388233184814,
+      "learning_rate": 9.747291469962452e-07,
+      "loss": 0.2846526652574539,
+      "step": 235
+    },
+    {
+      "epoch": 0.9613034623217923,
+      "grad_norm": 2.451559066772461,
+      "learning_rate": 9.742797935564011e-07,
+      "loss": 0.29611095786094666,
+      "step": 236
+    },
+    {
+      "epoch": 0.9653767820773931,
+      "grad_norm": 2.7313358783721924,
+      "learning_rate": 9.738265855914012e-07,
+      "loss": 0.3275996297597885,
+      "step": 237
+    },
+    {
+      "epoch": 0.9694501018329938,
+      "grad_norm": 2.5593299865722656,
+      "learning_rate": 9.733695267845171e-07,
+      "loss": 0.2993656247854233,
+      "step": 238
+    },
+    {
+      "epoch": 0.9735234215885947,
+      "grad_norm": 2.6013288497924805,
+      "learning_rate": 9.729086208503173e-07,
+      "loss": 0.31615155935287476,
+      "step": 239
+    },
+    {
+      "epoch": 0.9775967413441955,
+      "grad_norm": 2.5403575897216797,
+      "learning_rate": 9.72443871534636e-07,
+      "loss": 0.2843424677848816,
+      "step": 240
+    },
+    {
+      "epoch": 0.9816700610997964,
+      "grad_norm": 2.4495410919189453,
+      "learning_rate": 9.719752826145432e-07,
+      "loss": 0.2987358868122101,
+      "step": 241
+    },
+    {
+      "epoch": 0.9857433808553971,
+      "grad_norm": 2.719775676727295,
+      "learning_rate": 9.715028578983136e-07,
+      "loss": 0.34320636093616486,
+      "step": 242
+    },
+    {
+      "epoch": 0.9898167006109979,
+      "grad_norm": 2.7152929306030273,
+      "learning_rate": 9.71026601225396e-07,
+      "loss": 0.2937510758638382,
+      "step": 243
+    },
+    {
+      "epoch": 0.9938900203665988,
+      "grad_norm": 2.4305663108825684,
+      "learning_rate": 9.705465164663817e-07,
+      "loss": 0.29807206988334656,
+      "step": 244
+    },
+    {
+      "epoch": 0.9979633401221996,
+      "grad_norm": 2.322704792022705,
+      "learning_rate": 9.700626075229738e-07,
+      "loss": 0.3189048618078232,
+      "step": 245
+    },
+    {
+      "epoch": 1.0,
+      "grad_norm": 2.322704792022705,
+      "learning_rate": 9.695748783279544e-07,
+      "loss": 0.3195984363555908,
+      "step": 246
+    }
+  ],
+  "logging_steps": 1.0,
+  "max_steps": 1225,
+  "num_input_tokens_seen": 0,
+  "num_train_epochs": 5,
+  "save_steps": 100.0,
+  "stateful_callbacks": {
+    "TrainerControl": {
+      "args": {
+        "should_epoch_stop": false,
+        "should_evaluate": false,
+        "should_log": false,
+        "should_save": true,
+        "should_training_stop": false
+      },
+      "attributes": {}
+    }
+  },
+  "total_flos": 0.0,
+  "train_batch_size": 2,
+  "trial_name": null,
+  "trial_params": null
+}
    	
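The log above ends exactly at epoch 1.0 / step 246, which is what gives this checkpoint directory its name. A minimal sketch for inspecting the logged loss curve, assuming only that `trainer_state.json` has been downloaded alongside the checkpoint (the path is illustrative):

```python
import json

# Illustrative local path; point this at the downloaded checkpoint directory.
with open("checkpoint-246/trainer_state.json") as f:
    state = json.load(f)

# Each log_history entry mirrors the records above:
# {"epoch": ..., "grad_norm": ..., "learning_rate": ..., "loss": ..., "step": ...}
losses = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
print(f"{len(losses)} logged steps, final loss {losses[-1][1]:.4f} at step {losses[-1][0]}")
```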
        checkpoint-246/training_args.bin
    ADDED
    
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d5ba1195b28d4eb5f6c01fc66fa88747aff65390ba93c1fc53707d292a8b581b
+size 7672
    	
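Like the safetensors shards below, `training_args.bin` is stored as a Git LFS pointer (the `version`/`oid`/`size` triple) rather than as file content. Once the real file is fetched, it is a pickled `TrainingArguments` object; a sketch of reading it, assuming `torch` and `transformers` are installed:

```python
import torch

# training_args.bin is an arbitrary pickled object (not a tensor file), so
# weights_only=False is required on recent torch -- only load trusted files.
args = torch.load("checkpoint-246/training_args.bin", weights_only=False)
print(args.num_train_epochs, args.learning_rate)
```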
        checkpoint-246/vocab.json
    ADDED
    
The diff for this file is too large to render. See raw diff.
    	
        checkpoint-492/added_tokens.json
    ADDED
    
@@ -0,0 +1,24 @@
+{
+  "</tool_call>": 151658,
+  "<tool_call>": 151657,
+  "<|box_end|>": 151649,
+  "<|box_start|>": 151648,
+  "<|endoftext|>": 151643,
+  "<|file_sep|>": 151664,
+  "<|fim_middle|>": 151660,
+  "<|fim_pad|>": 151662,
+  "<|fim_prefix|>": 151659,
+  "<|fim_suffix|>": 151661,
+  "<|im_end|>": 151645,
+  "<|im_start|>": 151644,
+  "<|image_pad|>": 151655,
+  "<|object_ref_end|>": 151647,
+  "<|object_ref_start|>": 151646,
+  "<|quad_end|>": 151651,
+  "<|quad_start|>": 151650,
+  "<|repo_name|>": 151663,
+  "<|video_pad|>": 151656,
+  "<|vision_end|>": 151653,
+  "<|vision_pad|>": 151654,
+  "<|vision_start|>": 151652
+}
    	
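All 24 additions sit in the contiguous id range 151643–151664 at the top of the vocabulary (`vocab_size` is 152064 in the config below). A quick consistency check, assuming the checkpoint directory loads with the standard `AutoTokenizer` API:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("checkpoint-492")
# Ids should match added_tokens.json exactly.
assert tok.convert_tokens_to_ids("<|im_start|>") == 151644
assert tok.convert_tokens_to_ids("<|image_pad|>") == 151655
print(tok.convert_tokens_to_ids(["<|vision_start|>", "<|vision_end|>"]))  # [151652, 151653]
```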
        checkpoint-492/chat_template.json
    ADDED
    
@@ -0,0 +1,3 @@
+{
+  "chat_template": "{% set image_count = namespace(value=0) %}{% set video_count = namespace(value=0) %}{% for message in messages %}{% if loop.first and message['role'] != 'system' %}<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n{% endif %}<|im_start|>{{ message['role'] }}\n{% if message['content'] is string %}{{ message['content'] }}<|im_end|>\n{% else %}{% for content in message['content'] %}{% if content['type'] == 'image' or 'image' in content or 'image_url' in content %}{% set image_count.value = image_count.value + 1 %}{% if add_vision_id %}Picture {{ image_count.value }}: {% endif %}<|vision_start|><|image_pad|><|vision_end|>{% elif content['type'] == 'video' or 'video' in content %}{% set video_count.value = video_count.value + 1 %}{% if add_vision_id %}Video {{ video_count.value }}: {% endif %}<|vision_start|><|video_pad|><|vision_end|>{% elif 'text' in content %}{{ content['text'] }}{% endif %}{% endfor %}<|im_end|>\n{% endif %}{% endfor %}{% if add_generation_prompt %}<|im_start|>assistant\n{% endif %}"
+}
    	
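The Jinja template wraps every turn in `<|im_start|>role ... <|im_end|>`, injects a default system prompt when the first message is not a system message, and replaces each image or video item with a `<|vision_start|><|image_pad|><|vision_end|>` (or `<|video_pad|>`) placeholder. A sketch of rendering it, assuming the template is picked up through the standard `apply_chat_template` API (for Qwen2.5-VL this is typically done via `AutoProcessor`):

```python
from transformers import AutoProcessor

proc = AutoProcessor.from_pretrained("checkpoint-492")
messages = [{"role": "user", "content": [
    {"type": "image"},
    {"type": "text", "text": "Describe this image."},
]}]
text = proc.apply_chat_template(messages, add_generation_prompt=True, tokenize=False)
# -> the default system turn, then the user turn with the
#    <|vision_start|><|image_pad|><|vision_end|> placeholder, then <|im_start|>assistant
print(text)
```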
        checkpoint-492/config.json
    ADDED
    
@@ -0,0 +1,50 @@
+{
+  "_name_or_path": "/home/ma-user/work/haozhe/muze/models/Qwen2.5-VL-7B-Instruct",
+  "architectures": [
+    "Qwen2_5_VLForConditionalGeneration"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "eos_token_id": 151645,
+  "hidden_act": "silu",
+  "hidden_size": 3584,
+  "image_token_id": 151655,
+  "initializer_range": 0.02,
+  "intermediate_size": 18944,
+  "max_position_embeddings": 128000,
+  "max_window_layers": 28,
+  "model_type": "qwen2_5_vl",
+  "num_attention_heads": 28,
+  "num_hidden_layers": 28,
+  "num_key_value_heads": 4,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": {
+    "mrope_section": [
+      16,
+      24,
+      24
+    ],
+    "rope_type": "default",
+    "type": "default"
+  },
+  "rope_theta": 1000000.0,
+  "sliding_window": 32768,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.50.0.dev0",
+  "use_cache": false,
+  "use_sliding_window": false,
+  "video_token_id": 151656,
+  "vision_config": {
+    "hidden_size": 1280,
+    "in_chans": 3,
+    "model_type": "qwen2_5_vl",
+    "spatial_patch_size": 14,
+    "tokens_per_second": 2,
+    "torch_dtype": "bfloat16"
+  },
+  "vision_end_token_id": 151653,
+  "vision_start_token_id": 151652,
+  "vision_token_id": 151654,
+  "vocab_size": 152064
+}
    	
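The config confirms the base model (Qwen2.5-VL-7B-Instruct): 28 hidden layers, hidden size 3584, and grouped-query attention with 28 query heads over 4 KV heads, i.e. 7 query heads per KV head and a head dimension of 3584 / 28 = 128. The `mrope_section` values 16 + 24 + 24 sum to 64, half the head dimension, since multimodal RoPE splits the rotary half into temporal/height/width bands. A sketch of checking this arithmetic, assuming the directory loads with `AutoConfig`:

```python
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained("checkpoint-492")
head_dim = cfg.hidden_size // cfg.num_attention_heads           # 3584 // 28 = 128
kv_groups = cfg.num_attention_heads // cfg.num_key_value_heads  # 28 // 4 = 7
assert sum(cfg.rope_scaling["mrope_section"]) == head_dim // 2  # 16 + 24 + 24 = 64
print(head_dim, kv_groups, cfg.vocab_size)                      # 128 7 152064
```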
        checkpoint-492/generation_config.json
    ADDED
    
@@ -0,0 +1,14 @@
+{
+  "attn_implementation": "flash_attention_2",
+  "bos_token_id": 151643,
+  "do_sample": true,
+  "eos_token_id": [
+    151645,
+    151643
+  ],
+  "pad_token_id": 151643,
+  "repetition_penalty": 1.05,
+  "temperature": 1e-06,
+  "transformers_version": "4.50.0.dev0",
+  "use_cache": false
+}
    	
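One detail worth noting: `do_sample` is true but `temperature` is 1e-06, which makes decoding effectively greedy while keeping the 1.05 repetition penalty active. A sketch of loading and overriding these defaults, assuming the standard `GenerationConfig` API:

```python
from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained("checkpoint-492")
print(gen.temperature, gen.repetition_penalty)  # 1e-06 1.05

# Illustrative override for genuinely stochastic decoding.
gen.temperature = 0.7
gen.top_p = 0.9
```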
        checkpoint-492/merges.txt
    ADDED
    
The diff for this file is too large to render. See raw diff.
    	
        checkpoint-492/model-00001-of-00004.safetensors
    ADDED
    
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dab54da621665034af8389ddfcf48837d451fe2cc29e88789b22f52e3465e880
+size 4968243304
    	
        checkpoint-492/model-00002-of-00004.safetensors
    ADDED
    
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b88f497982e256b973744ece545b19bb9766064c261f3c5d5f4d36e2dae0d2ba
+size 4991495816
    	
        checkpoint-492/model-00003-of-00004.safetensors
    ADDED
    
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dd24fcaebde5bc06b3127411ab92d7ada5ce8d528622bb9ab308bcf2c1a452d8
+size 4932751040
    	
        checkpoint-492/model-00004-of-00004.safetensors
    ADDED
    
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:de8e42ce7d60b868b1f86fd1b600f52074e53c833a864ea5720a7355bff28adc
+size 1691924384
    	
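The four shard pointers add up to 16,584,414,544 bytes of file content, slightly above the 16,584,333,312-byte `total_size` declared in the index below; the gap is the safetensors header metadata in each shard, which the index's tensor-byte total does not count. Each pointer can be verified against a downloaded shard with nothing but the standard library; a sketch (path illustrative):

```python
import hashlib

def verify_lfs_pointer(path: str, oid: str, size: int) -> bool:
    """Stream a downloaded shard and check it against its git-lfs pointer."""
    h, n = hashlib.sha256(), 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
            n += len(chunk)
    return n == size and h.hexdigest() == oid

print(verify_lfs_pointer(
    "checkpoint-492/model-00004-of-00004.safetensors",
    "de8e42ce7d60b868b1f86fd1b600f52074e53c833a864ea5720a7355bff28adc",
    1691924384,
))
```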
        checkpoint-492/model.safetensors.index.json
    ADDED
    
@@ -0,0 +1,736 @@
+{
+  "metadata": {
+    "total_size": 16584333312
+  },
+  "weight_map": {
+    "lm_head.weight": "model-00004-of-00004.safetensors",
+    "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.16.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.16.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.16.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.16.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.17.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.17.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.17.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.17.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.17.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.17.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.17.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.17.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.17.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.17.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.input_layernorm.weight": "model-00004-of-00004.safetensors",
+    "model.layers.26.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
+    "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
+    "model.layers.26.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.input_layernorm.weight": "model-00004-of-00004.safetensors",
+    "model.layers.27.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
+    "model.layers.27.mlp.gate_proj.weight": "model-00004-of-00004.safetensors",
+    "model.layers.27.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
         | 
| 252 | 
            +
                "model.layers.27.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
         | 
| 253 | 
            +
                "model.layers.27.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         | 
| 254 | 
            +
                "model.layers.27.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         | 
| 255 | 
            +
                "model.layers.27.self_attn.o_proj.weight": "model-00004-of-00004.safetensors",
         | 
| 256 | 
            +
                "model.layers.27.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         | 
| 257 | 
            +
                "model.layers.27.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         | 
| 258 | 
            +
                "model.layers.27.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         | 
| 259 | 
            +
                "model.layers.27.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         | 
| 260 | 
            +
                "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
         | 
| 261 | 
            +
                "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 262 | 
            +
                "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 263 | 
            +
                "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 264 | 
            +
                "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
         | 
| 265 | 
            +
                "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 266 | 
            +
                "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 267 | 
            +
                "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 268 | 
            +
                "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 269 | 
            +
                "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 270 | 
            +
                "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 271 | 
            +
                "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 272 | 
            +
                "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
         | 
| 273 | 
            +
                "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 274 | 
            +
                "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 275 | 
            +
                "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 276 | 
            +
                "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
         | 
| 277 | 
            +
                "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 278 | 
            +
                "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 279 | 
            +
                "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 280 | 
            +
                "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 281 | 
            +
                "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 282 | 
            +
                "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 283 | 
            +
                "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 284 | 
            +
                "model.layers.5.input_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 285 | 
            +
                "model.layers.5.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 286 | 
            +
                "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 287 | 
            +
                "model.layers.5.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 288 | 
            +
                "model.layers.5.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 289 | 
            +
                "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 290 | 
            +
                "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 291 | 
            +
                "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 292 | 
            +
                "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 293 | 
            +
                "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 294 | 
            +
                "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 295 | 
            +
                "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 296 | 
            +
                "model.layers.6.input_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 297 | 
            +
                "model.layers.6.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 298 | 
            +
                "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 299 | 
            +
                "model.layers.6.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 300 | 
            +
                "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 301 | 
            +
                "model.layers.6.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 302 | 
            +
                "model.layers.6.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 303 | 
            +
                "model.layers.6.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 304 | 
            +
                "model.layers.6.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 305 | 
            +
                "model.layers.6.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 306 | 
            +
                "model.layers.6.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 307 | 
            +
                "model.layers.6.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 308 | 
            +
                "model.layers.7.input_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 309 | 
            +
                "model.layers.7.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 310 | 
            +
                "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 311 | 
            +
                "model.layers.7.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 312 | 
            +
                "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 313 | 
            +
                "model.layers.7.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 314 | 
            +
                "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 315 | 
            +
                "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 316 | 
            +
                "model.layers.7.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 317 | 
            +
                "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 318 | 
            +
                "model.layers.7.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 319 | 
            +
                "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 320 | 
            +
                "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 321 | 
            +
                "model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 322 | 
            +
                "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 323 | 
            +
                "model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 324 | 
            +
                "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 325 | 
            +
                "model.layers.8.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 326 | 
            +
                "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 327 | 
            +
                "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 328 | 
            +
                "model.layers.8.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 329 | 
            +
                "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 330 | 
            +
                "model.layers.8.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 331 | 
            +
                "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 332 | 
            +
                "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 333 | 
            +
                "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 334 | 
            +
                "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 335 | 
            +
                "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 336 | 
            +
                "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 337 | 
            +
                "model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 338 | 
            +
                "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 339 | 
            +
                "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 340 | 
            +
                "model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 341 | 
            +
                "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 342 | 
            +
                "model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 343 | 
            +
                "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 344 | 
            +
                "model.norm.weight": "model-00004-of-00004.safetensors",
         | 
| 345 | 
            +
                "visual.blocks.0.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 346 | 
            +
                "visual.blocks.0.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 347 | 
            +
                "visual.blocks.0.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 348 | 
            +
                "visual.blocks.0.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 349 | 
            +
                "visual.blocks.0.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 350 | 
            +
                "visual.blocks.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 351 | 
            +
                "visual.blocks.0.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 352 | 
            +
                "visual.blocks.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 353 | 
            +
                "visual.blocks.0.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 354 | 
            +
                "visual.blocks.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 355 | 
            +
                "visual.blocks.0.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 356 | 
            +
                "visual.blocks.0.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 357 | 
            +
                "visual.blocks.1.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 358 | 
            +
                "visual.blocks.1.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 359 | 
            +
                "visual.blocks.1.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 360 | 
            +
                "visual.blocks.1.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 361 | 
            +
                "visual.blocks.1.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 362 | 
            +
                "visual.blocks.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 363 | 
            +
                "visual.blocks.1.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 364 | 
            +
                "visual.blocks.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 365 | 
            +
                "visual.blocks.1.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 366 | 
            +
                "visual.blocks.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 367 | 
            +
                "visual.blocks.1.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 368 | 
            +
                "visual.blocks.1.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 369 | 
            +
                "visual.blocks.10.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 370 | 
            +
                "visual.blocks.10.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 371 | 
            +
                "visual.blocks.10.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 372 | 
            +
                "visual.blocks.10.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 373 | 
            +
                "visual.blocks.10.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 374 | 
            +
                "visual.blocks.10.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 375 | 
            +
                "visual.blocks.10.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 376 | 
            +
                "visual.blocks.10.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 377 | 
            +
                "visual.blocks.10.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 378 | 
            +
                "visual.blocks.10.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 379 | 
            +
                "visual.blocks.10.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 380 | 
            +
                "visual.blocks.10.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 381 | 
            +
                "visual.blocks.11.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 382 | 
            +
                "visual.blocks.11.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 383 | 
            +
                "visual.blocks.11.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 384 | 
            +
                "visual.blocks.11.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 385 | 
            +
                "visual.blocks.11.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 386 | 
            +
                "visual.blocks.11.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 387 | 
            +
                "visual.blocks.11.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 388 | 
            +
                "visual.blocks.11.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 389 | 
            +
                "visual.blocks.11.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 390 | 
            +
                "visual.blocks.11.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 391 | 
            +
                "visual.blocks.11.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 392 | 
            +
                "visual.blocks.11.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 393 | 
            +
                "visual.blocks.12.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 394 | 
            +
                "visual.blocks.12.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 395 | 
            +
                "visual.blocks.12.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 396 | 
            +
                "visual.blocks.12.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 397 | 
            +
                "visual.blocks.12.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 398 | 
            +
                "visual.blocks.12.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 399 | 
            +
                "visual.blocks.12.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 400 | 
            +
                "visual.blocks.12.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 401 | 
            +
                "visual.blocks.12.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 402 | 
            +
                "visual.blocks.12.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 403 | 
            +
                "visual.blocks.12.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 404 | 
            +
                "visual.blocks.12.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 405 | 
            +
                "visual.blocks.13.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 406 | 
            +
                "visual.blocks.13.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 407 | 
            +
                "visual.blocks.13.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 408 | 
            +
                "visual.blocks.13.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 409 | 
            +
                "visual.blocks.13.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 410 | 
            +
                "visual.blocks.13.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 411 | 
            +
                "visual.blocks.13.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 412 | 
            +
                "visual.blocks.13.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 413 | 
            +
                "visual.blocks.13.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 414 | 
            +
                "visual.blocks.13.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 415 | 
            +
                "visual.blocks.13.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 416 | 
            +
                "visual.blocks.13.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 417 | 
            +
                "visual.blocks.14.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 418 | 
            +
                "visual.blocks.14.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 419 | 
            +
                "visual.blocks.14.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 420 | 
            +
                "visual.blocks.14.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 421 | 
            +
                "visual.blocks.14.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 422 | 
            +
                "visual.blocks.14.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 423 | 
            +
                "visual.blocks.14.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 424 | 
            +
                "visual.blocks.14.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 425 | 
            +
                "visual.blocks.14.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 426 | 
            +
                "visual.blocks.14.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 427 | 
            +
                "visual.blocks.14.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 428 | 
            +
                "visual.blocks.14.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 429 | 
            +
                "visual.blocks.15.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 430 | 
            +
                "visual.blocks.15.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 431 | 
            +
                "visual.blocks.15.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 432 | 
            +
                "visual.blocks.15.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 433 | 
            +
                "visual.blocks.15.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 434 | 
            +
                "visual.blocks.15.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 435 | 
            +
                "visual.blocks.15.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 436 | 
            +
                "visual.blocks.15.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 437 | 
            +
                "visual.blocks.15.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 438 | 
            +
                "visual.blocks.15.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 439 | 
            +
                "visual.blocks.15.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 440 | 
            +
                "visual.blocks.15.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 441 | 
            +
                "visual.blocks.16.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 442 | 
            +
                "visual.blocks.16.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 443 | 
            +
                "visual.blocks.16.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 444 | 
            +
                "visual.blocks.16.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 445 | 
            +
                "visual.blocks.16.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 446 | 
            +
                "visual.blocks.16.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 447 | 
            +
                "visual.blocks.16.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 448 | 
            +
                "visual.blocks.16.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 449 | 
            +
                "visual.blocks.16.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 450 | 
            +
                "visual.blocks.16.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 451 | 
            +
                "visual.blocks.16.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 452 | 
            +
                "visual.blocks.16.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 453 | 
            +
                "visual.blocks.17.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 454 | 
            +
                "visual.blocks.17.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 455 | 
            +
                "visual.blocks.17.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 456 | 
            +
                "visual.blocks.17.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 457 | 
            +
                "visual.blocks.17.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 458 | 
            +
                "visual.blocks.17.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 459 | 
            +
                "visual.blocks.17.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 460 | 
            +
                "visual.blocks.17.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 461 | 
            +
                "visual.blocks.17.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 462 | 
            +
                "visual.blocks.17.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 463 | 
            +
                "visual.blocks.17.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 464 | 
            +
                "visual.blocks.17.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 465 | 
            +
                "visual.blocks.18.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 466 | 
            +
                "visual.blocks.18.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 467 | 
            +
                "visual.blocks.18.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 468 | 
            +
                "visual.blocks.18.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 469 | 
            +
                "visual.blocks.18.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 470 | 
            +
                "visual.blocks.18.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 471 | 
            +
                "visual.blocks.18.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 472 | 
            +
                "visual.blocks.18.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 473 | 
            +
                "visual.blocks.18.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 474 | 
            +
                "visual.blocks.18.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 475 | 
            +
                "visual.blocks.18.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 476 | 
            +
                "visual.blocks.18.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 477 | 
            +
                "visual.blocks.19.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 478 | 
            +
                "visual.blocks.19.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 479 | 
            +
                "visual.blocks.19.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 480 | 
            +
                "visual.blocks.19.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 481 | 
            +
                "visual.blocks.19.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 482 | 
            +
                "visual.blocks.19.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 483 | 
            +
                "visual.blocks.19.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 484 | 
            +
                "visual.blocks.19.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 485 | 
            +
                "visual.blocks.19.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 486 | 
            +
                "visual.blocks.19.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 487 | 
            +
                "visual.blocks.19.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 488 | 
            +
                "visual.blocks.19.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 489 | 
            +
                "visual.blocks.2.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 490 | 
            +
                "visual.blocks.2.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 491 | 
            +
                "visual.blocks.2.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 492 | 
            +
                "visual.blocks.2.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 493 | 
            +
                "visual.blocks.2.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 494 | 
            +
                "visual.blocks.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 495 | 
            +
                "visual.blocks.2.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 496 | 
            +
                "visual.blocks.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 497 | 
            +
                "visual.blocks.2.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 498 | 
            +
                "visual.blocks.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 499 | 
            +
                "visual.blocks.2.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 500 | 
            +
                "visual.blocks.2.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 501 | 
            +
                "visual.blocks.20.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 502 | 
            +
                "visual.blocks.20.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 503 | 
            +
                "visual.blocks.20.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 504 | 
            +
                "visual.blocks.20.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 505 | 
            +
                "visual.blocks.20.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 506 | 
            +
                "visual.blocks.20.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 507 | 
            +
                "visual.blocks.20.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 508 | 
            +
                "visual.blocks.20.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 509 | 
            +
                "visual.blocks.20.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 510 | 
            +
                "visual.blocks.20.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 511 | 
            +
                "visual.blocks.20.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 512 | 
            +
                "visual.blocks.20.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 513 | 
            +
                "visual.blocks.21.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 514 | 
            +
                "visual.blocks.21.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 515 | 
            +
                "visual.blocks.21.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 516 | 
            +
                "visual.blocks.21.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 517 | 
            +
                "visual.blocks.21.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 518 | 
            +
                "visual.blocks.21.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 519 | 
            +
                "visual.blocks.21.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 520 | 
            +
                "visual.blocks.21.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 521 | 
            +
                "visual.blocks.21.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 522 | 
            +
                "visual.blocks.21.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 523 | 
            +
                "visual.blocks.21.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 524 | 
            +
                "visual.blocks.21.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 525 | 
            +
                "visual.blocks.22.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 526 | 
            +
                "visual.blocks.22.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 527 | 
            +
                "visual.blocks.22.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 528 | 
            +
                "visual.blocks.22.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 529 | 
            +
                "visual.blocks.22.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 530 | 
            +
                "visual.blocks.22.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 531 | 
            +
                "visual.blocks.22.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 532 | 
            +
                "visual.blocks.22.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 533 | 
            +
                "visual.blocks.22.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 534 | 
            +
                "visual.blocks.22.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 535 | 
            +
                "visual.blocks.22.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 536 | 
            +
                "visual.blocks.22.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 537 | 
            +
                "visual.blocks.23.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 538 | 
            +
                "visual.blocks.23.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 539 | 
            +
                "visual.blocks.23.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 540 | 
            +
                "visual.blocks.23.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 541 | 
            +
                "visual.blocks.23.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 542 | 
            +
                "visual.blocks.23.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 543 | 
            +
                "visual.blocks.23.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 544 | 
            +
                "visual.blocks.23.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 545 | 
            +
                "visual.blocks.23.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 546 | 
            +
                "visual.blocks.23.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 547 | 
            +
                "visual.blocks.23.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 548 | 
            +
                "visual.blocks.23.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 549 | 
            +
                "visual.blocks.24.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 550 | 
            +
                "visual.blocks.24.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 551 | 
            +
                "visual.blocks.24.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 552 | 
            +
                "visual.blocks.24.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 553 | 
            +
                "visual.blocks.24.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 554 | 
            +
                "visual.blocks.24.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 555 | 
            +
                "visual.blocks.24.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 556 | 
            +
                "visual.blocks.24.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 557 | 
            +
                "visual.blocks.24.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 558 | 
            +
                "visual.blocks.24.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 559 | 
            +
                "visual.blocks.24.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 560 | 
            +
                "visual.blocks.24.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 561 | 
            +
                "visual.blocks.25.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 562 | 
            +
                "visual.blocks.25.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 563 | 
            +
                "visual.blocks.25.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 564 | 
            +
                "visual.blocks.25.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 565 | 
            +
                "visual.blocks.25.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 566 | 
            +
                "visual.blocks.25.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 567 | 
            +
                "visual.blocks.25.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 568 | 
            +
                "visual.blocks.25.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 569 | 
            +
                "visual.blocks.25.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 570 | 
            +
                "visual.blocks.25.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 571 | 
            +
                "visual.blocks.25.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 572 | 
            +
                "visual.blocks.25.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 573 | 
            +
                "visual.blocks.26.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 574 | 
            +
                "visual.blocks.26.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 575 | 
            +
                "visual.blocks.26.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 576 | 
            +
                "visual.blocks.26.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 577 | 
            +
                "visual.blocks.26.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 578 | 
            +
                "visual.blocks.26.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 579 | 
            +
                "visual.blocks.26.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 580 | 
            +
                "visual.blocks.26.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 581 | 
            +
                "visual.blocks.26.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 582 | 
            +
                "visual.blocks.26.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 583 | 
            +
                "visual.blocks.26.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 584 | 
            +
                "visual.blocks.26.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 585 | 
            +
                "visual.blocks.27.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 586 | 
            +
                "visual.blocks.27.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 587 | 
            +
                "visual.blocks.27.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 588 | 
            +
                "visual.blocks.27.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 589 | 
            +
                "visual.blocks.27.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 590 | 
            +
                "visual.blocks.27.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 591 | 
            +
                "visual.blocks.27.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 592 | 
            +
                "visual.blocks.27.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 593 | 
            +
                "visual.blocks.27.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 594 | 
            +
                "visual.blocks.27.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 595 | 
            +
                "visual.blocks.27.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 596 | 
            +
                "visual.blocks.27.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 597 | 
            +
                "visual.blocks.28.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 598 | 
            +
                "visual.blocks.28.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 599 | 
            +
                "visual.blocks.28.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 600 | 
            +
                "visual.blocks.28.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 601 | 
            +
                "visual.blocks.28.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 602 | 
            +
                "visual.blocks.28.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 603 | 
            +
                "visual.blocks.28.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 604 | 
            +
                "visual.blocks.28.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 605 | 
            +
                "visual.blocks.28.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 606 | 
            +
                "visual.blocks.28.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 607 | 
            +
                "visual.blocks.28.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 608 | 
            +
                "visual.blocks.28.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 609 | 
            +
                "visual.blocks.29.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 610 | 
            +
                "visual.blocks.29.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 611 | 
            +
                "visual.blocks.29.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 612 | 
            +
                "visual.blocks.29.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 613 | 
            +
                "visual.blocks.29.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 614 | 
            +
                "visual.blocks.29.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 615 | 
            +
                "visual.blocks.29.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 616 | 
            +
                "visual.blocks.29.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 617 | 
            +
                "visual.blocks.29.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 618 | 
            +
                "visual.blocks.29.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 619 | 
            +
                "visual.blocks.29.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 620 | 
            +
                "visual.blocks.29.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 621 | 
            +
                "visual.blocks.3.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 622 | 
            +
                "visual.blocks.3.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 623 | 
            +
                "visual.blocks.3.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 624 | 
            +
                "visual.blocks.3.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 625 | 
            +
                "visual.blocks.3.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 626 | 
            +
                "visual.blocks.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 627 | 
            +
                "visual.blocks.3.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 628 | 
            +
                "visual.blocks.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 629 | 
            +
                "visual.blocks.3.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 630 | 
            +
                "visual.blocks.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 631 | 
            +
                "visual.blocks.3.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 632 | 
            +
                "visual.blocks.3.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 633 | 
            +
                "visual.blocks.30.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 634 | 
            +
                "visual.blocks.30.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 635 | 
            +
                "visual.blocks.30.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 636 | 
            +
                "visual.blocks.30.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 637 | 
            +
                "visual.blocks.30.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 638 | 
            +
                "visual.blocks.30.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 639 | 
            +
                "visual.blocks.30.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 640 | 
            +
                "visual.blocks.30.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 641 | 
            +
                "visual.blocks.30.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 642 | 
            +
                "visual.blocks.30.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 643 | 
            +
                "visual.blocks.30.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 644 | 
            +
                "visual.blocks.30.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 645 | 
            +
                "visual.blocks.31.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 646 | 
            +
                "visual.blocks.31.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 647 | 
            +
                "visual.blocks.31.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 648 | 
            +
                "visual.blocks.31.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 649 | 
            +
                "visual.blocks.31.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 650 | 
            +
                "visual.blocks.31.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 651 | 
            +
                "visual.blocks.31.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 652 | 
            +
                "visual.blocks.31.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 653 | 
            +
                "visual.blocks.31.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 654 | 
            +
                "visual.blocks.31.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 655 | 
            +
                "visual.blocks.31.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 656 | 
            +
                "visual.blocks.31.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 657 | 
            +
                "visual.blocks.4.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 658 | 
            +
                "visual.blocks.4.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 659 | 
            +
                "visual.blocks.4.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 660 | 
            +
                "visual.blocks.4.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 661 | 
            +
                "visual.blocks.4.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 662 | 
            +
                "visual.blocks.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 663 | 
            +
                "visual.blocks.4.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 664 | 
            +
                "visual.blocks.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 665 | 
            +
                "visual.blocks.4.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 666 | 
            +
                "visual.blocks.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 667 | 
            +
                "visual.blocks.4.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 668 | 
            +
                "visual.blocks.4.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 669 | 
            +
                "visual.blocks.5.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 670 | 
            +
                "visual.blocks.5.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 671 | 
            +
                "visual.blocks.5.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 672 | 
            +
                "visual.blocks.5.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 673 | 
            +
                "visual.blocks.5.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 674 | 
            +
                "visual.blocks.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 675 | 
            +
                "visual.blocks.5.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 676 | 
            +
                "visual.blocks.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 677 | 
            +
                "visual.blocks.5.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 678 | 
            +
                "visual.blocks.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 679 | 
            +
                "visual.blocks.5.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 680 | 
            +
                "visual.blocks.5.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 681 | 
            +
                "visual.blocks.6.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 682 | 
            +
                "visual.blocks.6.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 683 | 
            +
                "visual.blocks.6.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 684 | 
            +
                "visual.blocks.6.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 685 | 
            +
                "visual.blocks.6.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 686 | 
            +
                "visual.blocks.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 687 | 
            +
                "visual.blocks.6.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 688 | 
            +
                "visual.blocks.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 689 | 
            +
                "visual.blocks.6.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 690 | 
            +
                "visual.blocks.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 691 | 
            +
                "visual.blocks.6.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 692 | 
            +
                "visual.blocks.6.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 693 | 
            +
                "visual.blocks.7.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 694 | 
            +
                "visual.blocks.7.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 695 | 
            +
                "visual.blocks.7.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 696 | 
            +
                "visual.blocks.7.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 697 | 
            +
                "visual.blocks.7.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 698 | 
            +
                "visual.blocks.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 699 | 
            +
                "visual.blocks.7.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 700 | 
            +
                "visual.blocks.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 701 | 
            +
                "visual.blocks.7.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 702 | 
            +
                "visual.blocks.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 703 | 
            +
                "visual.blocks.7.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 704 | 
            +
                "visual.blocks.7.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 705 | 
            +
                "visual.blocks.8.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 706 | 
            +
                "visual.blocks.8.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 707 | 
            +
                "visual.blocks.8.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 708 | 
            +
                "visual.blocks.8.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 709 | 
            +
                "visual.blocks.8.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 710 | 
            +
                "visual.blocks.8.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 711 | 
            +
                "visual.blocks.8.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 712 | 
            +
                "visual.blocks.8.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 713 | 
            +
                "visual.blocks.8.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 714 | 
            +
                "visual.blocks.8.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 715 | 
            +
                "visual.blocks.8.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 716 | 
            +
                "visual.blocks.8.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 717 | 
            +
                "visual.blocks.9.attn.proj.bias": "model-00001-of-00004.safetensors",
         | 
| 718 | 
            +
                "visual.blocks.9.attn.proj.weight": "model-00001-of-00004.safetensors",
         | 
| 719 | 
            +
                "visual.blocks.9.attn.qkv.bias": "model-00001-of-00004.safetensors",
         | 
| 720 | 
            +
                "visual.blocks.9.attn.qkv.weight": "model-00001-of-00004.safetensors",
         | 
| 721 | 
            +
                "visual.blocks.9.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 722 | 
            +
                "visual.blocks.9.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 723 | 
            +
                "visual.blocks.9.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 724 | 
            +
                "visual.blocks.9.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 725 | 
            +
                "visual.blocks.9.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 726 | 
            +
                "visual.blocks.9.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 727 | 
            +
                "visual.blocks.9.norm1.weight": "model-00001-of-00004.safetensors",
         | 
| 728 | 
            +
                "visual.blocks.9.norm2.weight": "model-00001-of-00004.safetensors",
         | 
| 729 | 
            +
                "visual.merger.ln_q.weight": "model-00001-of-00004.safetensors",
         | 
| 730 | 
            +
                "visual.merger.mlp.0.bias": "model-00001-of-00004.safetensors",
         | 
| 731 | 
            +
                "visual.merger.mlp.0.weight": "model-00001-of-00004.safetensors",
         | 
| 732 | 
            +
                "visual.merger.mlp.2.bias": "model-00001-of-00004.safetensors",
         | 
| 733 | 
            +
                "visual.merger.mlp.2.weight": "model-00001-of-00004.safetensors",
         | 
| 734 | 
            +
                "visual.patch_embed.proj.weight": "model-00001-of-00004.safetensors"
         | 
| 735 | 
            +
              }
         | 
| 736 | 
            +
            }
         | 
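This closes out the index: every `visual.*` parameter (vision blocks, patch merger, patch embedding) lives in the first shard, `model-00001-of-00004.safetensors`, and the `weight_map` object ends with the file. As a minimal sketch of how that map is consumed, the snippet below pulls one tensor straight from its shard without loading the rest; the checkpoint directory path is a hypothetical local path, not something the repo prescribes.

```python
import json

from safetensors import safe_open

# Hypothetical local clone path; adjust to wherever the checkpoint lives.
ckpt_dir = "checkpoint-492"

# The index maps each parameter name to the shard file that stores it.
with open(f"{ckpt_dir}/model.safetensors.index.json") as f:
    index = json.load(f)

name = "visual.merger.ln_q.weight"
shard = index["weight_map"][name]  # -> "model-00001-of-00004.safetensors"

# safetensors can open a shard lazily and fetch just this one tensor.
with safe_open(f"{ckpt_dir}/{shard}", framework="pt") as f:
    tensor = f.get_tensor(name)

print(name, tuple(tensor.shape))
```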
    	
        checkpoint-492/preprocessor_config.json
    ADDED
    
@@ -0,0 +1,29 @@
+{
+  "do_convert_rgb": true,
+  "do_normalize": true,
+  "do_rescale": true,
+  "do_resize": true,
+  "image_mean": [
+    0.48145466,
+    0.4578275,
+    0.40821073
+  ],
+  "image_processor_type": "Qwen2VLImageProcessor",
+  "image_std": [
+    0.26862954,
+    0.26130258,
+    0.27577711
+  ],
+  "max_pixels": 4014080,
+  "merge_size": 2,
+  "min_pixels": 3136,
+  "patch_size": 14,
+  "processor_class": "Qwen2_5_VLProcessor",
+  "resample": 3,
+  "rescale_factor": 0.00392156862745098,
+  "size": {
+    "longest_edge": 12845056,
+    "shortest_edge": 3136
+  },
+  "temporal_patch_size": 2
+}
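A few of these values decode neatly: `rescale_factor` is 1/255, and `patch_size` 14 combined with `merge_size` 2 means images are handled in 28-pixel units, so `min_pixels` = 3136 is a 56×56 floor while `max_pixels` caps the visual-token budget per image. A minimal sketch, assuming the checkpoint directory is available locally, of loading this config through `AutoProcessor` and overriding the pixel bounds (the override values here are purely illustrative):

```python
from transformers import AutoProcessor

# Hypothetical local path; the preprocessor_config.json above ships with it.
processor = AutoProcessor.from_pretrained(
    "checkpoint-492",
    # Qwen2-VL-style processors accept these overrides to trade visual
    # detail against sequence length; both values are example choices.
    min_pixels=256 * 28 * 28,
    max_pixels=1024 * 28 * 28,
)

print(processor.image_processor.min_pixels, processor.image_processor.max_pixels)
```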
    	
        checkpoint-492/special_tokens_map.json
    ADDED
    
@@ -0,0 +1,31 @@
+{
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "eos_token": {
+    "content": "<|im_end|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
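The map pins the ChatML end-of-turn marker `<|im_end|>` as the EOS token and reuses `<|endoftext|>` for padding, which is what lets generation stop cleanly at the end of an assistant turn. A quick sanity check, assuming the checkpoint directory is local:

```python
from transformers import AutoTokenizer

# Hypothetical local path; any checkpoint-*/ directory with these files works.
tok = AutoTokenizer.from_pretrained("checkpoint-492")

# The special-token map above should round-trip through the tokenizer.
assert tok.eos_token == "<|im_end|>"
assert tok.pad_token == "<|endoftext|>"

# Special tokens get stable ids, e.g. for masking image placeholders.
print(tok.convert_tokens_to_ids(["<|im_end|>", "<|image_pad|>"]))
```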
    	
        checkpoint-492/tokenizer.json
    ADDED
    
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
+size 11421896
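This is only a Git LFS pointer, not the tokenizer itself; the real ~11 MB `tokenizer.json` is materialized by `git lfs pull --include="checkpoint-492/tokenizer.json"`. A small stdlib-only sketch to verify a downloaded copy against the digest and size recorded in the pointer:

```python
import hashlib
import os

# Values copied from the LFS pointer file above.
expected_oid = "9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa"
expected_size = 11421896

path = "checkpoint-492/tokenizer.json"

# Hash in 1 MiB chunks so large files do not need to fit in memory.
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert os.path.getsize(path) == expected_size
assert h.hexdigest() == expected_oid
print("tokenizer.json matches its LFS pointer")
```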
