Upload folder using huggingface_hub
- README.md +191 -0
 - added_tokens.json +28 -0
 - config.json +27 -0
 - generation_config.json +14 -0
 - model-00001-of-00014.safetensors +3 -0
 - model-00002-of-00014.safetensors +3 -0
 - model-00003-of-00014.safetensors +3 -0
 - model-00004-of-00014.safetensors +3 -0
 - model-00005-of-00014.safetensors +3 -0
 - model-00006-of-00014.safetensors +3 -0
 - model-00007-of-00014.safetensors +3 -0
 - model-00008-of-00014.safetensors +3 -0
 - model-00009-of-00014.safetensors +3 -0
 - model-00010-of-00014.safetensors +3 -0
 - model-00011-of-00014.safetensors +3 -0
 - model-00012-of-00014.safetensors +3 -0
 - model-00013-of-00014.safetensors +3 -0
 - model-00014-of-00014.safetensors +3 -0
 - model.safetensors.index.json +778 -0
 - special_tokens_map.json +31 -0
 - tokenizer.json +0 -0
 - tokenizer_config.json +239 -0
 - vocab.json +0 -0
 
    	
README.md ADDED
@@ -0,0 +1,191 @@
---
license: apache-2.0
library_name: transformers
language:
- en
tags:
- chat
- conversational
base_model:
- Qwen/QwQ-32B
- Qwen/Qwen2.5-32B-Instruct
- huihui-ai/QwQ-32B-abliterated
- prithivMLmods/Sombrero-QwQ-32B-Elite10-Fixed
- EVA-UNIT-01/EVA-Qwen2.5-32B-v0.2
- Sao10K/32B-Qwen2.5-Kunou-v1
- zetasepic/Qwen2.5-32B-Instruct-abliterated-v2
- Saxo/Linkbricks-Horizon-AI-Avengers-V1-32B
---

# Loqwqtus 2.5 32B Instruct

Loqwqtus 2.5 32B Instruct is a *normalized denoised Fourier interpolation* of the following models:

```yaml
output_base_model: "Qwen/QwQ-32B"
output_dtype: "bfloat16"
finetune_merge:
  - { "model": "Qwen/Qwen2.5-32B-Instruct", "base": "Qwen/Qwen2.5-32B", "alpha": 1.0 }
  - { "model": "huihui-ai/QwQ-32B-abliterated", "base": "Qwen/QwQ-32B", "alpha": 0.9 }
  - { "model": "prithivMLmods/Sombrero-QwQ-32B-Elite10-Fixed", "base": "Qwen/QwQ-32B", "alpha": 0.5 }
  - { "model": "EVA-UNIT-01/EVA-Qwen2.5-32B-v0.2", "base": "Qwen/Qwen2.5-32B", "alpha": 0.5 }
  - { "model": "Sao10K/32B-Qwen2.5-Kunou-v1", "base": "Qwen/Qwen2.5-32B-Instruct", "alpha": 0.9 }
  - { "model": "zetasepic/Qwen2.5-32B-Instruct-abliterated-v2", "base": "Qwen/Qwen2.5-32B-Instruct", "alpha": 0.75 }
  - { "model": "Saxo/Linkbricks-Horizon-AI-Avengers-V1-32B", "base": "Qwen/Qwen2.5-32B-Instruct", "alpha": 0.5 }
```

In other words, all of these models get warped and interpolated in signal space, and then jammed back on top of the base model.
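
The merge code itself isn't part of this upload, so purely as an illustration, here is a minimal per-tensor sketch of what a "normalized denoised Fourier interpolation" could look like. The function name, the `keep` fraction, and the readings of "denoised" (drop the quietest frequency bins) and "normalized" (rescale the filtered delta back to its original norm) are all assumptions, not the author's implementation.

```python
# Illustrative sketch only, NOT the author's merge code. Assumes torch;
# `keep`, the kth-value denoising, and the norm rescaling are guesses at
# what "denoised" and "normalized" mean here.
import torch

def fourier_merge(base: torch.Tensor,
                  finetunes: list[tuple[torch.Tensor, torch.Tensor, float]],
                  keep: float = 0.98) -> torch.Tensor:
    """Blend (finetune, its_base, alpha) task vectors in frequency space onto `base`."""
    n = base.numel()
    acc = torch.zeros(n, dtype=torch.float32)
    total = 0.0
    for weight, ft_base, alpha in finetunes:
        delta = (weight - ft_base).flatten().to(torch.float32)   # task vector
        spec = torch.fft.rfft(delta)                             # into signal space
        mags = spec.abs()
        k = max(int((1.0 - keep) * mags.numel()), 1)
        cutoff = torch.kthvalue(mags, k).values                  # "denoise": drop quietest bins
        spec = torch.where(mags >= cutoff, spec, torch.zeros_like(spec))
        denoised = torch.fft.irfft(spec, n=n)
        if denoised.norm() > 0:                                  # "normalize": keep original energy
            denoised = denoised * (delta.norm() / denoised.norm())
        acc += alpha * denoised
        total += alpha
    return base + (acc / total).reshape(base.shape).to(base.dtype)
```

Under those assumptions the function would be applied tensor by tensor across the seven `finetune_merge` entries above, with each finetune's delta taken against its own listed base.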

### What is this?

This is my first attempt to create a thinker model, in the style of the Qwentile series.

### Is it good?

It is a huge overthinker, but it nailed the "If you have one bucket that holds two gallons and another bucket that holds five gallons, how do you fill one of the buckets with exactly 4 gallons?" problem, discovering two solutions and picking the more efficient one.

The following was from an IQ4_NL test:

```
<think>
Okay, so I need to figure out how to get exactly 4 gallons using a 2-gallon and a 5-gallon bucket. Hmm, let me think. Maybe I should start by filling one of the buckets and then transferring water between them? Let me try to outline the steps.

First, maybe fill the 5-gallon bucket completely. Then pour water from the 5-gallon into the 2-gallon until the 2-gallon is full. That should leave me with 3 gallons in the 5-gallon bucket because 5 minus 2 is 3. Then, I can empty the 2-gallon bucket. Now, I have 3 gallons in the 5-gallon and 0 in the 2-gallon.

Next, pour the remaining 3 gallons from the 5-gallon into the 2-gallon bucket. The 2-gallon can only take 2 gallons, so after pouring, the 2-gallon will be full, and there will be 1 gallon left in the 5-gallon. Now, empty the 2-gallon again. Now the 2-gallon is empty, and the 5-gallon has 1 gallon.

Then, pour the 1 gallon from the 5-gallon into the 2-gallon. Now the 2-gallon has 1 gallon, and the 5-gallon is empty. Then fill the 5-gallon completely again. Now, we have 5 gallons in the 5-gallon and 1 gallon in the 2-gallon.

Now, transfer water from the 5-gallon to the 2-gallon until the 2-gallon is full. Since the 2-gallon already has 1 gallon, it can only take 1 more gallon. So pouring 1 gallon from the 5-gallon into the 2-gallon leaves the 5-gallon with 4 gallons (5 -1 =4).

That's exactly what we wanted! So the steps are:

1. Fill the 5-gallon bucket.
2. Pour from 5 into 2 until 2 is full (leaves 3 in 5).
3. Empty 2.
4. Pour the remaining 3 from 5 into 2 (now 2 has 2, leaving 1 in 5).
5. Empty 2 again.
6. Pour the 1 from 5 into 2 (so 2 has 1, 5 empty).
7. Fill 5 again.
8. Pour from 5 into 2 until 2 is full (needs 1 more, so 5 gives 1, leaving 4 in 5).

So the 5-gallon bucket now has exactly 4 gallons. Let me check each step again to make sure I didn't make a mistake.

Alternatively, maybe there's a quicker way. Let me see. If I start by filling the 2-gallon first and do something else... Hmm. Let me try that approach.

Start by filling the 2-gallon and pour it into the 5. Then fill the 2 again and pour into the 5 again. The 5 can hold 5 gallons, so after first pour, it has 2, then another 2 makes 4. Oh! Wait, that's simpler.

Wait, if you fill the 2, pour into 5 (now 5 has 2). Then fill 2 again and pour into 5 again. Since 5 can only take 3 more, so after pouring 2 into it, you have 2 + 2 =4. Wait, but the 5-gallon can take up to 5, so adding another 2 would fill it to 4? Wait no, 2+2 is 4, so that's exactly 4 in the 5-gallon. Wait, that seems easier.

So, is that the case? Let me verify step by step:

1. Fill the 2-gallon bucket.
2. Pour it into the 5-gallon. Now, 5 has 2, 2 has 0.
3. Fill the 2-gallon again.
4. Pour into the 5-gallon until 5 is full. Since 5 can take only 3 more gallons (since it has 2 already), so pouring 3 gallons from the 2 would leave 2 - 3 = negative, which can't be. Wait, so maybe this approach doesn't work? Wait, the 2-gallon has 2, and the 5 has 2. If you pour from the 2 into the 5, the 5 can take 3 more. But since the 2 has only 2, you can pour all 2 into the 5, making 5 have 4, and the 2 is empty. So then that would be 4 in the 5-gallon bucket. Wait, that's even simpler! So the steps would be:

1. Fill 2 into 5 twice.

Wait, but the first pour from 2 to 5 gives 2, then filling 2 again and pouring into 5 again gives total 4 in the 5, since the second pour would be 2 more. But 5 can take 5, so adding 2 twice would make 4. So that's actually the same as the second approach.

Wait, so why did I think of the longer method first? Maybe I confused the steps. Let me check again.

So the first method I thought of was by first filling the 5, pouring into 2, leaving 3, then transferring, etc. But actually the second approach is simpler.

Wait, but is there a catch here? Because in the second approach, if you pour the 2 into the 5 twice, you can get 4. But let's make sure:

Start with empty buckets.

Step 1: Fill 2-gallon. (2:2, 5:0)

Step 2: Pour into 5. (2:0, 5:2)

Step 3: Fill 2 again. (2:2, 5:2)

Step 4: Pour into 5 again. 5 can take up to 5, so adding 2 more would make 4. So pouring all 2 into 5 gives (2:0, 5:4).

That's exactly 4 in the 5-gallon. So that's the solution. Wait, that seems straightforward.

Hmm, so maybe the first method I thought of was overcomplicating it. But why did I think of that first? Maybe because sometimes you have to do that when the numbers are different.

Wait, but in this case, since 2 + 2 =4, which is less than 5, you can just pour twice.

Is there a reason this wouldn't work? Let me check again.

Yes, if the 5-gallon is empty, you can pour the 2 into it twice, but only if you have enough capacity. Since 2*2=4 which is less than 5, that's okay.

So in that case, the answer is much simpler. So why did I think of the longer method first? Maybe because in other puzzles where the target is higher than the small bucket's capacity, you need to do the pour and fill steps.

But in this case, it's possible to do it in fewer steps.

So the correct answer is: Fill the 2-gallon bucket twice into the 5-gallon, resulting in 4 gallons.

Wait, but let me confirm once more.

Alternatively, maybe the problem requires you to end up with the 4 gallons in the 5-gallon bucket, which is exactly what this does.

So that must be the solution.

Therefore, the steps are:

1. Fill the 2-gallon bucket.

2. Pour it into the 5-gallon.

3. Fill the 2-gallon again.

4. Pour from the 2-gallon into the 5-gallon until the 5-gallon is full. Since the 5 already has 2, adding 2 more would make it 4, so you pour all 2 into it, resulting in 4 in the 5-gallon.

Yes, that works.

Therefore, this is the correct solution. Maybe I overcomplicated it at first, but this seems straightforward.

Alternatively, another way: if you want 4 in the 2-gallon bucket? But since it's only 2 gallons, that's impossible. So the 4 must be in the 5.

Hence, the answer is as above.

So the steps are:

- Fill the 2-gallon twice into the 5-gallon, resulting in 4 gallons in the 5.

I think that's the correct answer.
</think>

To measure exactly 4 gallons using a 2-gallon and a 5-gallon bucket, follow these steps:

1. **Fill the 2-gallon bucket completely.**
   (2-gallon: 2, 5-gallon: 0)

2. **Pour the water from the 2-gallon into the 5-gallon bucket.**
   (2-gallon: 0, 5-gallon: 2)

3. **Fill the 2-gallon bucket again.**
   (2-gallon: 2, 5-gallon: 2)

4. **Pour water from the 2-gallon into the 5-gallon until the 5-gallon is full.**
   Since the 5-gallon already has 2 gallons, it can only take **3 more gallons**. However, the 2-gallon only has 2 gallons, so you pour all of it into the 5-gallon. This results in:
   (2-gallon: 0, 5-gallon: **4**)

You now have **exactly 4 gallons** in the 5-gallon bucket.

**Final Answer:**
Fill the 2-gallon bucket twice into the 5-gallon bucket, leaving **4 gallons** in the 5-gallon bucket.
```

## Censored

This model is lightly censored by default, but this can easily be overridden by tweaking the system prompt.

## Citation

If you find our work helpful, feel free to give us a cite.

```
@misc{loqwqtus2.5-32b-instruct,
    title = {Loqwqtus 2.5 32B Instruct},
    url = {https://huggingface.co/maldv/Loqwqtus2.5-32B-Instruct},
    author = {Praxis Maldevide},
    month = {March},
    year = {2025}
}
```
    	
added_tokens.json ADDED
@@ -0,0 +1,28 @@
{
  "</think>": 151668,
  "</tool_call>": 151658,
  "</tool_response>": 151666,
  "<think>": 151667,
  "<tool_call>": 151657,
  "<tool_response>": 151665,
  "<|box_end|>": 151649,
  "<|box_start|>": 151648,
  "<|endoftext|>": 151643,
  "<|file_sep|>": 151664,
  "<|fim_middle|>": 151660,
  "<|fim_pad|>": 151662,
  "<|fim_prefix|>": 151659,
  "<|fim_suffix|>": 151661,
  "<|im_end|>": 151645,
  "<|im_start|>": 151644,
  "<|image_pad|>": 151655,
  "<|object_ref_end|>": 151647,
  "<|object_ref_start|>": 151646,
  "<|quad_end|>": 151651,
  "<|quad_start|>": 151650,
  "<|repo_name|>": 151663,
  "<|video_pad|>": 151656,
  "<|vision_end|>": 151653,
  "<|vision_pad|>": 151654,
  "<|vision_start|>": 151652
}
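
A quick way to sanity-check these ids after download (a sketch; the repo id is taken from the citation URL in the README above):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("maldv/Loqwqtus2.5-32B-Instruct")
for t in ("<think>", "</think>", "<tool_call>", "<|im_end|>"):
    print(t, tok.convert_tokens_to_ids(t))
# expect 151667, 151668, 151657, 151645 per added_tokens.json above
```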
    	
config.json ADDED
@@ -0,0 +1,27 @@
{
  "architectures": [
    "Qwen2ForCausalLM"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 151643,
  "eos_token_id": 151645,
  "hidden_act": "silu",
  "hidden_size": 5120,
  "initializer_range": 0.02,
  "intermediate_size": 27648,
  "max_position_embeddings": 40960,
  "max_window_layers": 64,
  "model_type": "qwen2",
  "num_attention_heads": 40,
  "num_hidden_layers": 64,
  "num_key_value_heads": 8,
  "rms_norm_eps": 1e-05,
  "rope_theta": 1000000.0,
  "sliding_window": 32768,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.43.1",
  "use_cache": true,
  "use_sliding_window": false,
  "vocab_size": 152064
}
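
One consequence of this config worth spelling out: with grouped-query attention (8 KV heads against 40 query heads, head dim 5120/40 = 128), the KV cache stays modest. Back-of-the-envelope arithmetic (mine, not part of the upload):

```python
hidden_size, n_heads, n_kv_heads, n_layers = 5120, 40, 8, 64  # from config.json
head_dim = hidden_size // n_heads                              # 128
bytes_per_token = 2 * n_layers * n_kv_heads * head_dim * 2     # K+V, bf16 (2 bytes)
print(bytes_per_token // 1024)                                 # 256 KiB per token
print(40960 * bytes_per_token / 2**30)                         # 10.0 GiB at the full 40960 context
```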
    	
generation_config.json ADDED
@@ -0,0 +1,14 @@
{
  "bos_token_id": 151643,
  "do_sample": true,
  "eos_token_id": [
    151645,
    151643
  ],
  "pad_token_id": 151643,
  "repetition_penalty": 1.0,
  "temperature": 0.6,
  "top_k": 40,
  "top_p": 0.95,
  "transformers_version": "4.45.2"
}
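
Putting the pieces together, a minimal inference sketch using these sampling defaults (assumes `transformers`, `torch`, and hardware that fits a 32B bf16 model; the repo id comes from the README's citation, and the system message is just an example of the steering mentioned under "Censored"):

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "maldv/Loqwqtus2.5-32B-Instruct"
tok = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype="bfloat16",
                                             device_map="auto")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Two buckets hold 2 and 5 gallons. Measure exactly 4 gallons."},
]
inputs = tok.apply_chat_template(messages, add_generation_prompt=True,
                                 return_tensors="pt").to(model.device)
# Sampling settings mirror generation_config.json above.
out = model.generate(inputs, max_new_tokens=2048, do_sample=True,
                     temperature=0.6, top_k=40, top_p=0.95)
print(tok.decode(out[0][inputs.shape[-1]:], skip_special_tokens=True))
```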
    	
model-00001-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8a5ee6adf5456c4f3774dc72ea43ec024e2a54dddfb264b39d941a498a1ff737
size 4891730992
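
Each shard entry here is a Git LFS pointer rather than the weights themselves: `oid` is the SHA-256 of the real file and `size` its byte length. A minimal integrity check against the pointer (a sketch; assumes the shard has been downloaded locally):

```python
import hashlib

def sha256_of(path: str, chunk: int = 1 << 20) -> str:
    """Stream a large file and return its hex SHA-256."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk):
            h.update(block)
    return h.hexdigest()

# Should print 8a5ee6ad... for an intact first shard.
print(sha256_of("model-00001-of-00014.safetensors"))
```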
    	
model-00002-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:71333d31c68186156beeecbf1efd1dcd8076316d45505d42d55b582eee9b0d15
size 4876059352
    	
model-00003-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5a57aa5b7b4047344d55ecfa8613ba8c6270ee88f86dbad1afb9d72e7191d3a0
size 4876059384
    	
model-00004-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b1345fca63862b4c1d43ff2fdb89f30c681920b4a69df7253e7fe455debc031f
size 4876059416
    	
model-00005-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6d56ba9f640758ff2ec7589254bca0393b2b5f85fc37fc25a0f5e957db2d4c16
size 4876059416
    	
model-00006-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:73c3dac3a205f3d8e18c2ee71e1374afe7cd9a714906207fc5a0cd324db547c9
size 4876059416
    	
model-00007-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e99ff0f9a98507c46ef3937754098fff10072baaf644d6648bfd5fc7b1fdc236
size 4876059416
    	
model-00008-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cbb5e66f3c7fc2712420ba3065c67b8172be90218333eb4c93996cbc9be17abf
size 4876059416
    	
model-00009-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:43aeb9ae5334077cd2f1c533fd6378cf861e5d333e5597738ffde83a4c815a27
size 4876059416
    	
model-00010-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3dcd6287c43e0e784c6e48a213ed734f4bcaad3ec115a57ecea126b62e3675ff
size 4876059416
    	
model-00011-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:37d15d773da41467137a41c824cb465071a174d1cd5b5c502f6e50c377ee3f6a
size 4876059416
    	
model-00012-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:43bcd662ff60b09f420b07cf2a1c41da30ae93534aeb83a66a919c5511af6093
size 4876059416
    	
model-00013-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:275df03c0726a72a1d7c2355aa4c2658bbc4e9723e6011cc5384565126b4a190
size 4876059416
    	
model-00014-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:61ea4bd81125cb6ed622528e9e5a6ed87b7fecabd83ce5007f0d3043e7ac27bd
size 2123397800
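
Across the fourteen pointers, the `size` fields sum to 65,527,841,688 bytes, slightly more than the 65,527,752,704 `total_size` recorded in the index below; the difference (88,984 bytes) is the per-file safetensors JSON headers, which count toward file size but not toward tensor bytes. Checking that arithmetic (mine, not part of the upload):

```python
# Sum of the LFS pointer sizes above vs. the index's total_size below.
sizes = [4891730992, 4876059352, 4876059384] + [4876059416] * 10 + [2123397800]
total_size = 65527752704          # from model.safetensors.index.json
print(sum(sizes))                 # 65527841688 bytes on disk
print(sum(sizes) - total_size)    # 88984 bytes of safetensors headers
```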
    	
model.safetensors.index.json ADDED
@@ -0,0 +1,778 @@
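This index maps every tensor name to its shard. The listing below is truncated at line 60 of 778; the `weight_map` presumably continues in the same pattern through layer 63 (`num_hidden_layers` is 64). Since the file is plain JSON, locating a tensor's shard is a dictionary lookup; a sketch (local paths assumed):

```python
import json

with open("model.safetensors.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"] / 2**30)   # ~61.0 GiB of bf16 weights
print(index["weight_map"]["lm_head.weight"])     # model-00014-of-00014.safetensors

# All shards holding layer 12:
print({v for k, v in index["weight_map"].items()
       if k.startswith("model.layers.12.")})      # {'model-00003-of-00014.safetensors'}
```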
{
  "metadata": {
    "total_size": 65527752704
  },
  "weight_map": {
    "lm_head.weight": "model-00014-of-00014.safetensors",
    "model.embed_tokens.weight": "model-00001-of-00014.safetensors",
    "model.layers.0.input_layernorm.weight": "model-00001-of-00014.safetensors",
    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00014.safetensors",
    "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.1.input_layernorm.weight": "model-00001-of-00014.safetensors",
    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00014.safetensors",
    "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.10.input_layernorm.weight": "model-00003-of-00014.safetensors",
    "model.layers.10.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.10.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.10.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.10.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
    "model.layers.10.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
    "model.layers.10.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.10.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.10.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
    "model.layers.10.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.10.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
    "model.layers.10.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.11.input_layernorm.weight": "model-00003-of-00014.safetensors",
    "model.layers.11.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.11.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.11.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.11.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
    "model.layers.11.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
    "model.layers.11.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.11.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.11.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
    "model.layers.11.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.11.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
    "model.layers.11.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.12.input_layernorm.weight": "model-00003-of-00014.safetensors",
    "model.layers.12.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.12.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.12.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.12.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
         
     | 
| 61 | 
         
            +
                "model.layers.12.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
         
     | 
| 62 | 
         
            +
                "model.layers.12.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
         
     | 
| 63 | 
         
            +
                "model.layers.12.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
         
     | 
| 64 | 
         
            +
                "model.layers.12.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
         
     | 
| 65 | 
         
            +
                "model.layers.12.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
         
     | 
| 66 | 
         
            +
                "model.layers.12.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
         
     | 
| 67 | 
         
            +
                "model.layers.12.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
         
     | 
| 68 | 
         
            +
                "model.layers.13.input_layernorm.weight": "model-00004-of-00014.safetensors",
         
     | 
| 69 | 
         
            +
                "model.layers.13.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 70 | 
         
            +
                "model.layers.13.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
         
     | 
| 71 | 
         
            +
                "model.layers.13.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 72 | 
         
            +
                "model.layers.13.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
         
     | 
| 73 | 
         
            +
                "model.layers.13.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
         
     | 
| 74 | 
         
            +
                "model.layers.13.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
         
     | 
| 75 | 
         
            +
                "model.layers.13.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
         
     | 
| 76 | 
         
            +
                "model.layers.13.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
         
     | 
| 77 | 
         
            +
                "model.layers.13.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
         
     | 
| 78 | 
         
            +
                "model.layers.13.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
         
     | 
| 79 | 
         
            +
                "model.layers.13.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
         
     | 
| 80 | 
         
            +
                "model.layers.14.input_layernorm.weight": "model-00004-of-00014.safetensors",
         
     | 
| 81 | 
         
            +
                "model.layers.14.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 82 | 
         
            +
                "model.layers.14.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 83 | 
         
            +
                "model.layers.14.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 84 | 
         
            +
                "model.layers.14.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
         
     | 
| 85 | 
         
            +
                "model.layers.14.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
         
     | 
| 86 | 
         
            +
                "model.layers.14.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 87 | 
         
            +
                "model.layers.14.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 88 | 
         
            +
                "model.layers.14.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
         
     | 
| 89 | 
         
            +
                "model.layers.14.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 90 | 
         
            +
                "model.layers.14.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
         
     | 
| 91 | 
         
            +
                "model.layers.14.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 92 | 
         
            +
                "model.layers.15.input_layernorm.weight": "model-00004-of-00014.safetensors",
         
     | 
| 93 | 
         
            +
                "model.layers.15.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 94 | 
         
            +
                "model.layers.15.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 95 | 
         
            +
                "model.layers.15.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 96 | 
         
            +
                "model.layers.15.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
         
     | 
| 97 | 
         
            +
                "model.layers.15.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
         
     | 
| 98 | 
         
            +
                "model.layers.15.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 99 | 
         
            +
                "model.layers.15.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 100 | 
         
            +
                "model.layers.15.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
         
     | 
| 101 | 
         
            +
                "model.layers.15.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 102 | 
         
            +
                "model.layers.15.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
         
     | 
| 103 | 
         
            +
                "model.layers.15.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 104 | 
         
            +
                "model.layers.16.input_layernorm.weight": "model-00004-of-00014.safetensors",
         
     | 
| 105 | 
         
            +
                "model.layers.16.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 106 | 
         
            +
                "model.layers.16.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 107 | 
         
            +
                "model.layers.16.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 108 | 
         
            +
                "model.layers.16.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
         
     | 
| 109 | 
         
            +
                "model.layers.16.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
         
     | 
| 110 | 
         
            +
                "model.layers.16.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 111 | 
         
            +
                "model.layers.16.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 112 | 
         
            +
                "model.layers.16.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
         
     | 
| 113 | 
         
            +
                "model.layers.16.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 114 | 
         
            +
                "model.layers.16.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
         
     | 
| 115 | 
         
            +
                "model.layers.16.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 116 | 
         
            +
                "model.layers.17.input_layernorm.weight": "model-00004-of-00014.safetensors",
         
     | 
| 117 | 
         
            +
                "model.layers.17.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 118 | 
         
            +
                "model.layers.17.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 119 | 
         
            +
                "model.layers.17.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 120 | 
         
            +
                "model.layers.17.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
         
     | 
| 121 | 
         
            +
                "model.layers.17.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
         
     | 
| 122 | 
         
            +
                "model.layers.17.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 123 | 
         
            +
                "model.layers.17.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 124 | 
         
            +
                "model.layers.17.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
         
     | 
| 125 | 
         
            +
                "model.layers.17.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 126 | 
         
            +
                "model.layers.17.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
         
     | 
| 127 | 
         
            +
                "model.layers.17.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 128 | 
         
            +
                "model.layers.18.input_layernorm.weight": "model-00005-of-00014.safetensors",
         
     | 
| 129 | 
         
            +
                "model.layers.18.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 130 | 
         
            +
                "model.layers.18.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 131 | 
         
            +
                "model.layers.18.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 132 | 
         
            +
                "model.layers.18.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
         
     | 
| 133 | 
         
            +
                "model.layers.18.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
         
     | 
| 134 | 
         
            +
                "model.layers.18.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 135 | 
         
            +
                "model.layers.18.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 136 | 
         
            +
                "model.layers.18.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
         
     | 
| 137 | 
         
            +
                "model.layers.18.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 138 | 
         
            +
                "model.layers.18.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
         
     | 
| 139 | 
         
            +
                "model.layers.18.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
         
     | 
| 140 | 
         
            +
                "model.layers.19.input_layernorm.weight": "model-00005-of-00014.safetensors",
         
     | 
| 141 | 
         
            +
                "model.layers.19.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 142 | 
         
            +
                "model.layers.19.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 143 | 
         
            +
                "model.layers.19.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 144 | 
         
            +
                "model.layers.19.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
         
     | 
| 145 | 
         
            +
                "model.layers.19.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
         
     | 
| 146 | 
         
            +
                "model.layers.19.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 147 | 
         
            +
                "model.layers.19.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 148 | 
         
            +
                "model.layers.19.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
         
     | 
| 149 | 
         
            +
                "model.layers.19.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 150 | 
         
            +
                "model.layers.19.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
         
     | 
| 151 | 
         
            +
                "model.layers.19.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 152 | 
         
            +
                "model.layers.2.input_layernorm.weight": "model-00001-of-00014.safetensors",
         
     | 
| 153 | 
         
            +
                "model.layers.2.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
         
     | 
| 154 | 
         
            +
                "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
         
     | 
| 155 | 
         
            +
                "model.layers.2.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
         
     | 
| 156 | 
         
            +
                "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00014.safetensors",
         
     | 
| 157 | 
         
            +
                "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
         
     | 
| 158 | 
         
            +
                "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
         
     | 
| 159 | 
         
            +
                "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
         
     | 
| 160 | 
         
            +
                "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
         
     | 
| 161 | 
         
            +
                "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
         
     | 
| 162 | 
         
            +
                "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
         
     | 
| 163 | 
         
            +
                "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
         
     | 
| 164 | 
         
            +
                "model.layers.20.input_layernorm.weight": "model-00005-of-00014.safetensors",
         
     | 
| 165 | 
         
            +
                "model.layers.20.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 166 | 
         
            +
                "model.layers.20.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 167 | 
         
            +
                "model.layers.20.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 168 | 
         
            +
                "model.layers.20.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
         
     | 
| 169 | 
         
            +
                "model.layers.20.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
         
     | 
| 170 | 
         
            +
                "model.layers.20.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 171 | 
         
            +
                "model.layers.20.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 172 | 
         
            +
                "model.layers.20.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
         
     | 
| 173 | 
         
            +
                "model.layers.20.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 174 | 
         
            +
                "model.layers.20.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
         
     | 
| 175 | 
         
            +
                "model.layers.20.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 176 | 
         
            +
                "model.layers.21.input_layernorm.weight": "model-00005-of-00014.safetensors",
         
     | 
| 177 | 
         
            +
                "model.layers.21.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 178 | 
         
            +
                "model.layers.21.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 179 | 
         
            +
                "model.layers.21.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 180 | 
         
            +
                "model.layers.21.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
         
     | 
| 181 | 
         
            +
                "model.layers.21.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
         
     | 
| 182 | 
         
            +
                "model.layers.21.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 183 | 
         
            +
                "model.layers.21.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 184 | 
         
            +
                "model.layers.21.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
         
     | 
| 185 | 
         
            +
                "model.layers.21.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 186 | 
         
            +
                "model.layers.21.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
         
     | 
| 187 | 
         
            +
                "model.layers.21.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 188 | 
         
            +
                "model.layers.22.input_layernorm.weight": "model-00005-of-00014.safetensors",
         
     | 
| 189 | 
         
            +
                "model.layers.22.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 190 | 
         
            +
                "model.layers.22.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 191 | 
         
            +
                "model.layers.22.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 192 | 
         
            +
                "model.layers.22.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
         
     | 
| 193 | 
         
            +
                "model.layers.22.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
         
     | 
| 194 | 
         
            +
                "model.layers.22.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 195 | 
         
            +
                "model.layers.22.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 196 | 
         
            +
                "model.layers.22.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
         
     | 
| 197 | 
         
            +
                "model.layers.22.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 198 | 
         
            +
                "model.layers.22.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
         
     | 
| 199 | 
         
            +
                "model.layers.22.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 200 | 
         
            +
                "model.layers.23.input_layernorm.weight": "model-00006-of-00014.safetensors",
         
     | 
| 201 | 
         
            +
                "model.layers.23.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 202 | 
         
            +
                "model.layers.23.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 203 | 
         
            +
                "model.layers.23.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 204 | 
         
            +
                "model.layers.23.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
         
     | 
| 205 | 
         
            +
                "model.layers.23.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
         
     | 
| 206 | 
         
            +
                "model.layers.23.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 207 | 
         
            +
                "model.layers.23.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 208 | 
         
            +
                "model.layers.23.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
         
     | 
| 209 | 
         
            +
                "model.layers.23.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 210 | 
         
            +
                "model.layers.23.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
         
     | 
| 211 | 
         
            +
                "model.layers.23.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
         
     | 
| 212 | 
         
            +
                "model.layers.24.input_layernorm.weight": "model-00006-of-00014.safetensors",
         
     | 
| 213 | 
         
            +
                "model.layers.24.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 214 | 
         
            +
                "model.layers.24.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 215 | 
         
            +
                "model.layers.24.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 216 | 
         
            +
                "model.layers.24.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
         
     | 
| 217 | 
         
            +
                "model.layers.24.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
         
     | 
| 218 | 
         
            +
                "model.layers.24.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 219 | 
         
            +
                "model.layers.24.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 220 | 
         
            +
                "model.layers.24.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
         
     | 
| 221 | 
         
            +
                "model.layers.24.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 222 | 
         
            +
                "model.layers.24.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
         
     | 
| 223 | 
         
            +
                "model.layers.24.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 224 | 
         
            +
                "model.layers.25.input_layernorm.weight": "model-00006-of-00014.safetensors",
         
     | 
| 225 | 
         
            +
                "model.layers.25.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 226 | 
         
            +
                "model.layers.25.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 227 | 
         
            +
                "model.layers.25.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 228 | 
         
            +
                "model.layers.25.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
         
     | 
| 229 | 
         
            +
                "model.layers.25.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
         
     | 
| 230 | 
         
            +
                "model.layers.25.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 231 | 
         
            +
                "model.layers.25.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 232 | 
         
            +
                "model.layers.25.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
         
     | 
| 233 | 
         
            +
                "model.layers.25.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 234 | 
         
            +
                "model.layers.25.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
         
     | 
| 235 | 
         
            +
                "model.layers.25.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 236 | 
         
            +
                "model.layers.26.input_layernorm.weight": "model-00006-of-00014.safetensors",
         
     | 
| 237 | 
         
            +
                "model.layers.26.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 238 | 
         
            +
                "model.layers.26.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 239 | 
         
            +
                "model.layers.26.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 240 | 
         
            +
                "model.layers.26.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
         
     | 
| 241 | 
         
            +
                "model.layers.26.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
         
     | 
| 242 | 
         
            +
                "model.layers.26.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 243 | 
         
            +
                "model.layers.26.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 244 | 
         
            +
                "model.layers.26.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
         
     | 
| 245 | 
         
            +
                "model.layers.26.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 246 | 
         
            +
                "model.layers.26.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
         
     | 
| 247 | 
         
            +
                "model.layers.26.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 248 | 
         
            +
                "model.layers.27.input_layernorm.weight": "model-00006-of-00014.safetensors",
         
     | 
| 249 | 
         
            +
                "model.layers.27.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 250 | 
         
            +
                "model.layers.27.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 251 | 
         
            +
                "model.layers.27.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 252 | 
         
            +
                "model.layers.27.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
         
     | 
| 253 | 
         
            +
                "model.layers.27.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
         
     | 
| 254 | 
         
            +
                "model.layers.27.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 255 | 
         
            +
                "model.layers.27.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 256 | 
         
            +
                "model.layers.27.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
         
     | 
| 257 | 
         
            +
                "model.layers.27.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 258 | 
         
            +
                "model.layers.27.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
         
     | 
| 259 | 
         
            +
                "model.layers.27.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 260 | 
         
            +
                "model.layers.28.input_layernorm.weight": "model-00007-of-00014.safetensors",
         
     | 
| 261 | 
         
            +
                "model.layers.28.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 262 | 
         
            +
                "model.layers.28.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 263 | 
         
            +
                "model.layers.28.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 264 | 
         
            +
                "model.layers.28.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
         
     | 
| 265 | 
         
            +
                "model.layers.28.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
         
     | 
| 266 | 
         
            +
                "model.layers.28.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 267 | 
         
            +
                "model.layers.28.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 268 | 
         
            +
                "model.layers.28.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
         
     | 
| 269 | 
         
            +
                "model.layers.28.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 270 | 
         
            +
                "model.layers.28.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
         
     | 
| 271 | 
         
            +
                "model.layers.28.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
         
     | 
| 272 | 
         
            +
                "model.layers.29.input_layernorm.weight": "model-00007-of-00014.safetensors",
         
     | 
| 273 | 
         
            +
                "model.layers.29.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 274 | 
         
            +
                "model.layers.29.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 275 | 
         
            +
                "model.layers.29.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 276 | 
         
            +
                "model.layers.29.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
         
     | 
| 277 | 
         
            +
                "model.layers.29.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
         
     | 
| 278 | 
         
            +
                "model.layers.29.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 279 | 
         
            +
                "model.layers.29.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 280 | 
         
            +
                "model.layers.29.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
         
     | 
| 281 | 
         
            +
                "model.layers.29.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 282 | 
         
            +
                "model.layers.29.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
         
     | 
| 283 | 
         
            +
                "model.layers.29.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 284 | 
         
            +
                "model.layers.3.input_layernorm.weight": "model-00002-of-00014.safetensors",
         
     | 
| 285 | 
         
            +
                "model.layers.3.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 286 | 
         
            +
                "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
         
     | 
| 287 | 
         
            +
                "model.layers.3.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 288 | 
         
            +
                "model.layers.3.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
         
     | 
| 289 | 
         
            +
                "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
         
     | 
| 290 | 
         
            +
                "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
         
     | 
| 291 | 
         
            +
                "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
         
     | 
| 292 | 
         
            +
                "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
         
     | 
| 293 | 
         
            +
                "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
         
     | 
| 294 | 
         
            +
                "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
         
     | 
| 295 | 
         
            +
                "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
         
     | 
| 296 | 
         
            +
                "model.layers.30.input_layernorm.weight": "model-00007-of-00014.safetensors",
         
     | 
| 297 | 
         
            +
                "model.layers.30.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 298 | 
         
            +
                "model.layers.30.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 299 | 
         
            +
                "model.layers.30.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 300 | 
         
            +
                "model.layers.30.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
         
     | 
| 301 | 
         
            +
                "model.layers.30.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
         
     | 
| 302 | 
         
            +
                "model.layers.30.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 303 | 
         
            +
                "model.layers.30.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 304 | 
         
            +
                "model.layers.30.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
         
     | 
| 305 | 
         
            +
                "model.layers.30.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 306 | 
         
            +
                "model.layers.30.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
         
     | 
| 307 | 
         
            +
                "model.layers.30.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 308 | 
         
            +
                "model.layers.31.input_layernorm.weight": "model-00007-of-00014.safetensors",
         
     | 
| 309 | 
         
            +
                "model.layers.31.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 310 | 
         
            +
                "model.layers.31.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 311 | 
         
            +
                "model.layers.31.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 312 | 
         
            +
                "model.layers.31.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
         
     | 
| 313 | 
         
            +
                "model.layers.31.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
         
     | 
| 314 | 
         
            +
                "model.layers.31.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 315 | 
         
            +
                "model.layers.31.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 316 | 
         
            +
                "model.layers.31.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
         
     | 
| 317 | 
         
            +
                "model.layers.31.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 318 | 
         
            +
                "model.layers.31.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
         
     | 
| 319 | 
         
            +
                "model.layers.31.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 320 | 
         
            +
                "model.layers.32.input_layernorm.weight": "model-00007-of-00014.safetensors",
         
     | 
| 321 | 
         
            +
                "model.layers.32.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 322 | 
         
            +
                "model.layers.32.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 323 | 
         
            +
                "model.layers.32.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 324 | 
         
            +
                "model.layers.32.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
         
     | 
| 325 | 
         
            +
                "model.layers.32.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
         
     | 
| 326 | 
         
            +
                "model.layers.32.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 327 | 
         
            +
                "model.layers.32.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 328 | 
         
            +
                "model.layers.32.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
         
     | 
| 329 | 
         
            +
                "model.layers.32.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 330 | 
         
            +
                "model.layers.32.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
         
     | 
| 331 | 
         
            +
                "model.layers.32.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 332 | 
         
            +
                "model.layers.33.input_layernorm.weight": "model-00008-of-00014.safetensors",
         
     | 
| 333 | 
         
            +
                "model.layers.33.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 334 | 
         
            +
                "model.layers.33.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 335 | 
         
            +
                "model.layers.33.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 336 | 
         
            +
                "model.layers.33.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
         
     | 
| 337 | 
         
            +
                "model.layers.33.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
         
     | 
| 338 | 
         
            +
                "model.layers.33.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 339 | 
         
            +
                "model.layers.33.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 340 | 
         
            +
                "model.layers.33.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
         
     | 
| 341 | 
         
            +
                "model.layers.33.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 342 | 
         
            +
                "model.layers.33.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
         
     | 
| 343 | 
         
            +
                "model.layers.33.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
         
     | 
| 344 | 
         
            +
                "model.layers.34.input_layernorm.weight": "model-00008-of-00014.safetensors",
         
     | 
| 345 | 
         
            +
                "model.layers.34.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 346 | 
         
            +
                "model.layers.34.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 347 | 
         
            +
                "model.layers.34.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 348 | 
         
            +
                "model.layers.34.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
         
     | 
| 349 | 
         
            +
                "model.layers.34.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
         
     | 
| 350 | 
         
            +
                "model.layers.34.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 351 | 
         
            +
                "model.layers.34.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 352 | 
         
            +
                "model.layers.34.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
         
     | 
| 353 | 
         
            +
                "model.layers.34.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 354 | 
         
            +
                "model.layers.34.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
         
     | 
| 355 | 
         
            +
                "model.layers.34.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 356 | 
         
            +
                "model.layers.35.input_layernorm.weight": "model-00008-of-00014.safetensors",
         
     | 
| 357 | 
         
            +
                "model.layers.35.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 358 | 
         
            +
                "model.layers.35.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 359 | 
         
            +
                "model.layers.35.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 360 | 
         
            +
                "model.layers.35.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
         
     | 
| 361 | 
         
            +
                "model.layers.35.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
         
     | 
| 362 | 
         
            +
                "model.layers.35.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 363 | 
         
            +
                "model.layers.35.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 364 | 
         
            +
                "model.layers.35.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
         
     | 
| 365 | 
         
            +
                "model.layers.35.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 366 | 
         
            +
                "model.layers.35.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
         
     | 
| 367 | 
         
            +
                "model.layers.35.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 368 | 
         
            +
                "model.layers.36.input_layernorm.weight": "model-00008-of-00014.safetensors",
         
     | 
| 369 | 
         
            +
                "model.layers.36.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 370 | 
         
            +
                "model.layers.36.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 371 | 
         
            +
                "model.layers.36.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 372 | 
         
            +
                "model.layers.36.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
         
     | 
| 373 | 
         
            +
                "model.layers.36.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
         
     | 
| 374 | 
         
            +
                "model.layers.36.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 375 | 
         
            +
                "model.layers.36.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 376 | 
         
            +
                "model.layers.36.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
         
     | 
| 377 | 
         
            +
                "model.layers.36.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 378 | 
         
            +
                "model.layers.36.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
         
     | 
| 379 | 
         
            +
                "model.layers.36.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 380 | 
         
            +
                "model.layers.37.input_layernorm.weight": "model-00008-of-00014.safetensors",
         
     | 
| 381 | 
         
            +
                "model.layers.37.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 382 | 
         
            +
                "model.layers.37.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 383 | 
         
            +
                "model.layers.37.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 384 | 
         
            +
                "model.layers.37.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
         
     | 
| 385 | 
         
            +
                "model.layers.37.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
         
     | 
| 386 | 
         
            +
                "model.layers.37.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 387 | 
         
            +
                "model.layers.37.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 388 | 
         
            +
                "model.layers.37.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
         
     | 
| 389 | 
         
            +
                "model.layers.37.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 390 | 
         
            +
                "model.layers.37.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
         
     | 
| 391 | 
         
            +
                "model.layers.37.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 392 | 
         
            +
                "model.layers.38.input_layernorm.weight": "model-00009-of-00014.safetensors",
         
     | 
| 393 | 
         
            +
                "model.layers.38.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 394 | 
         
            +
                "model.layers.38.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 395 | 
         
            +
                "model.layers.38.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 396 | 
         
            +
                "model.layers.38.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
         
     | 
| 397 | 
         
            +
                "model.layers.38.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
         
     | 
| 398 | 
         
            +
                "model.layers.38.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 399 | 
         
            +
                "model.layers.38.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 400 | 
         
            +
                "model.layers.38.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
         
     | 
| 401 | 
         
            +
                "model.layers.38.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 402 | 
         
            +
                "model.layers.38.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
         
     | 
| 403 | 
         
            +
                "model.layers.38.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
         
     | 
| 404 | 
         
            +
                "model.layers.39.input_layernorm.weight": "model-00009-of-00014.safetensors",
         
     | 
| 405 | 
         
            +
                "model.layers.39.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 406 | 
         
            +
                "model.layers.39.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 407 | 
         
            +
                "model.layers.39.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 408 | 
         
            +
                "model.layers.39.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
         
     | 
| 409 | 
         
            +
                "model.layers.39.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
         
     | 
| 410 | 
         
            +
                "model.layers.39.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 411 | 
         
            +
                "model.layers.39.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 412 | 
         
            +
                "model.layers.39.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
         
     | 
| 413 | 
         
            +
                "model.layers.39.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 414 | 
         
            +
                "model.layers.39.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
         
     | 
| 415 | 
         
            +
                "model.layers.39.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 416 | 
         
            +
                "model.layers.4.input_layernorm.weight": "model-00002-of-00014.safetensors",
         
     | 
| 417 | 
         
            +
                "model.layers.4.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 418 | 
         
            +
                "model.layers.4.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 419 | 
         
            +
                "model.layers.4.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 420 | 
         
            +
                "model.layers.4.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
         
     | 
| 421 | 
         
            +
                "model.layers.4.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
         
     | 
| 422 | 
         
            +
                "model.layers.4.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 423 | 
         
            +
                "model.layers.4.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 424 | 
         
            +
                "model.layers.4.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
         
     | 
| 425 | 
         
            +
                "model.layers.4.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 426 | 
         
            +
                "model.layers.4.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
         
     | 
| 427 | 
         
            +
                "model.layers.4.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 428 | 
         
            +
                "model.layers.40.input_layernorm.weight": "model-00009-of-00014.safetensors",
         
     | 
| 429 | 
         
            +
                "model.layers.40.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 430 | 
         
            +
                "model.layers.40.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 431 | 
         
            +
                "model.layers.40.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 432 | 
         
            +
                "model.layers.40.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
         
     | 
| 433 | 
         
            +
                "model.layers.40.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
         
     | 
| 434 | 
         
            +
                "model.layers.40.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 435 | 
         
            +
                "model.layers.40.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 436 | 
         
            +
                "model.layers.40.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
         
     | 
| 437 | 
         
            +
                "model.layers.40.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 438 | 
         
            +
                "model.layers.40.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
         
     | 
| 439 | 
         
            +
                "model.layers.40.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 440 | 
         
            +
                "model.layers.41.input_layernorm.weight": "model-00009-of-00014.safetensors",
         
     | 
| 441 | 
         
            +
                "model.layers.41.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 442 | 
         
            +
                "model.layers.41.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 443 | 
         
            +
                "model.layers.41.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 444 | 
         
            +
                "model.layers.41.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
         
     | 
| 445 | 
         
            +
                "model.layers.41.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
         
     | 
| 446 | 
         
            +
                "model.layers.41.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 447 | 
         
            +
                "model.layers.41.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 448 | 
         
            +
                "model.layers.41.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
         
     | 
| 449 | 
         
            +
                "model.layers.41.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 450 | 
         
            +
                "model.layers.41.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
         
     | 
| 451 | 
         
            +
                "model.layers.41.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 452 | 
         
            +
                "model.layers.42.input_layernorm.weight": "model-00009-of-00014.safetensors",
         
     | 
| 453 | 
         
            +
                "model.layers.42.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 454 | 
         
            +
                "model.layers.42.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 455 | 
         
            +
                "model.layers.42.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 456 | 
         
            +
                "model.layers.42.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
         
     | 
| 457 | 
         
            +
                "model.layers.42.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
         
     | 
| 458 | 
         
            +
                "model.layers.42.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 459 | 
         
            +
                "model.layers.42.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 460 | 
         
            +
                "model.layers.42.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
         
     | 
| 461 | 
         
            +
                "model.layers.42.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 462 | 
         
            +
                "model.layers.42.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
         
     | 
| 463 | 
         
            +
                "model.layers.42.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 464 | 
         
            +
                "model.layers.43.input_layernorm.weight": "model-00010-of-00014.safetensors",
         
     | 
| 465 | 
         
            +
                "model.layers.43.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 466 | 
         
            +
                "model.layers.43.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 467 | 
         
            +
                "model.layers.43.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 468 | 
         
            +
                "model.layers.43.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
         
     | 
| 469 | 
         
            +
                "model.layers.43.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
         
     | 
| 470 | 
         
            +
                "model.layers.43.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 471 | 
         
            +
                "model.layers.43.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 472 | 
         
            +
                "model.layers.43.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
         
     | 
| 473 | 
         
            +
                "model.layers.43.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 474 | 
         
            +
                "model.layers.43.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
         
     | 
| 475 | 
         
            +
                "model.layers.43.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
         
     | 
| 476 | 
         
            +
                "model.layers.44.input_layernorm.weight": "model-00010-of-00014.safetensors",
         
     | 
| 477 | 
         
            +
                "model.layers.44.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 478 | 
         
            +
                "model.layers.44.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 479 | 
         
            +
                "model.layers.44.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 480 | 
         
            +
                "model.layers.44.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
         
     | 
| 481 | 
         
            +
                "model.layers.44.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
         
     | 
| 482 | 
         
            +
                "model.layers.44.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 483 | 
         
            +
                "model.layers.44.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 484 | 
         
            +
                "model.layers.44.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
         
     | 
| 485 | 
         
            +
                "model.layers.44.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 486 | 
         
            +
                "model.layers.44.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
         
     | 
| 487 | 
         
            +
                "model.layers.44.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 488 | 
         
            +
                "model.layers.45.input_layernorm.weight": "model-00010-of-00014.safetensors",
         
     | 
| 489 | 
         
            +
                "model.layers.45.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 490 | 
         
            +
                "model.layers.45.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 491 | 
         
            +
                "model.layers.45.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 492 | 
         
            +
                "model.layers.45.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
         
     | 
| 493 | 
         
            +
                "model.layers.45.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
         
     | 
| 494 | 
         
            +
                "model.layers.45.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 495 | 
         
            +
                "model.layers.45.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 496 | 
         
            +
                "model.layers.45.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
         
     | 
| 497 | 
         
            +
                "model.layers.45.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 498 | 
         
            +
                "model.layers.45.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
         
     | 
| 499 | 
         
            +
                "model.layers.45.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 500 | 
         
            +
                "model.layers.46.input_layernorm.weight": "model-00010-of-00014.safetensors",
         
     | 
| 501 | 
         
            +
                "model.layers.46.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 502 | 
         
            +
                "model.layers.46.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 503 | 
         
            +
                "model.layers.46.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 504 | 
         
            +
                "model.layers.46.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
         
     | 
| 505 | 
         
            +
                "model.layers.46.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
         
     | 
| 506 | 
         
            +
                "model.layers.46.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 507 | 
         
            +
                "model.layers.46.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 508 | 
         
            +
                "model.layers.46.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
         
     | 
| 509 | 
         
            +
                "model.layers.46.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 510 | 
         
            +
                "model.layers.46.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
         
     | 
| 511 | 
         
            +
                "model.layers.46.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 512 | 
         
            +
                "model.layers.47.input_layernorm.weight": "model-00010-of-00014.safetensors",
         
     | 
| 513 | 
         
            +
                "model.layers.47.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 514 | 
         
            +
                "model.layers.47.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 515 | 
         
            +
                "model.layers.47.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 516 | 
         
            +
                "model.layers.47.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
         
     | 
| 517 | 
         
            +
                "model.layers.47.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
         
     | 
| 518 | 
         
            +
                "model.layers.47.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 519 | 
         
            +
                "model.layers.47.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 520 | 
         
            +
                "model.layers.47.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
         
     | 
| 521 | 
         
            +
                "model.layers.47.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 522 | 
         
            +
                "model.layers.47.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
         
     | 
| 523 | 
         
            +
                "model.layers.47.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 524 | 
         
            +
                "model.layers.48.input_layernorm.weight": "model-00011-of-00014.safetensors",
         
     | 
| 525 | 
         
            +
                "model.layers.48.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 526 | 
         
            +
                "model.layers.48.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 527 | 
         
            +
                "model.layers.48.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 528 | 
         
            +
                "model.layers.48.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
         
     | 
| 529 | 
         
            +
                "model.layers.48.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
         
     | 
| 530 | 
         
            +
                "model.layers.48.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 531 | 
         
            +
                "model.layers.48.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 532 | 
         
            +
                "model.layers.48.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
         
     | 
| 533 | 
         
            +
                "model.layers.48.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 534 | 
         
            +
                "model.layers.48.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
         
     | 
| 535 | 
         
            +
                "model.layers.48.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
         
     | 
| 536 | 
         
            +
                "model.layers.49.input_layernorm.weight": "model-00011-of-00014.safetensors",
         
     | 
| 537 | 
         
            +
                "model.layers.49.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 538 | 
         
            +
                "model.layers.49.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 539 | 
         
            +
                "model.layers.49.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 540 | 
         
            +
                "model.layers.49.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
         
     | 
| 541 | 
         
            +
                "model.layers.49.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
         
     | 
| 542 | 
         
            +
                "model.layers.49.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 543 | 
         
            +
                "model.layers.49.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 544 | 
         
            +
                "model.layers.49.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
         
     | 
| 545 | 
         
            +
                "model.layers.49.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 546 | 
         
            +
                "model.layers.49.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
         
     | 
| 547 | 
         
            +
                "model.layers.49.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 548 | 
         
            +
                "model.layers.5.input_layernorm.weight": "model-00002-of-00014.safetensors",
         
     | 
| 549 | 
         
            +
                "model.layers.5.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 550 | 
         
            +
                "model.layers.5.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 551 | 
         
            +
                "model.layers.5.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 552 | 
         
            +
                "model.layers.5.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
         
     | 
| 553 | 
         
            +
                "model.layers.5.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
         
     | 
| 554 | 
         
            +
                "model.layers.5.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 555 | 
         
            +
                "model.layers.5.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 556 | 
         
            +
                "model.layers.5.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
         
     | 
| 557 | 
         
            +
                "model.layers.5.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 558 | 
         
            +
                "model.layers.5.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
         
     | 
| 559 | 
         
            +
                "model.layers.5.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 560 | 
         
            +
                "model.layers.50.input_layernorm.weight": "model-00011-of-00014.safetensors",
         
     | 
| 561 | 
         
            +
                "model.layers.50.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 562 | 
         
            +
                "model.layers.50.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 563 | 
         
            +
                "model.layers.50.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 564 | 
         
            +
                "model.layers.50.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
         
     | 
| 565 | 
         
            +
                "model.layers.50.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
         
     | 
| 566 | 
         
            +
                "model.layers.50.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 567 | 
         
            +
                "model.layers.50.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 568 | 
         
            +
                "model.layers.50.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
         
     | 
| 569 | 
         
            +
                "model.layers.50.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 570 | 
         
            +
                "model.layers.50.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
         
     | 
| 571 | 
         
            +
                "model.layers.50.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 572 | 
         
            +
                "model.layers.51.input_layernorm.weight": "model-00011-of-00014.safetensors",
         
     | 
| 573 | 
         
            +
                "model.layers.51.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 574 | 
         
            +
                "model.layers.51.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 575 | 
         
            +
                "model.layers.51.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 576 | 
         
            +
                "model.layers.51.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
         
     | 
| 577 | 
         
            +
                "model.layers.51.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
         
     | 
| 578 | 
         
            +
                "model.layers.51.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 579 | 
         
            +
                "model.layers.51.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 580 | 
         
            +
                "model.layers.51.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
         
     | 
| 581 | 
         
            +
                "model.layers.51.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 582 | 
         
            +
                "model.layers.51.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
         
     | 
| 583 | 
         
            +
                "model.layers.51.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 584 | 
         
            +
                "model.layers.52.input_layernorm.weight": "model-00011-of-00014.safetensors",
         
     | 
| 585 | 
         
            +
                "model.layers.52.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 586 | 
         
            +
                "model.layers.52.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 587 | 
         
            +
                "model.layers.52.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 588 | 
         
            +
                "model.layers.52.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
         
     | 
| 589 | 
         
            +
                "model.layers.52.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
         
     | 
| 590 | 
         
            +
                "model.layers.52.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 591 | 
         
            +
                "model.layers.52.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 592 | 
         
            +
                "model.layers.52.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
         
     | 
| 593 | 
         
            +
                "model.layers.52.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 594 | 
         
            +
                "model.layers.52.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
         
     | 
| 595 | 
         
            +
                "model.layers.52.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 596 | 
         
            +
                "model.layers.53.input_layernorm.weight": "model-00012-of-00014.safetensors",
         
     | 
| 597 | 
         
            +
                "model.layers.53.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 598 | 
         
            +
                "model.layers.53.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 599 | 
         
            +
                "model.layers.53.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 600 | 
         
            +
                "model.layers.53.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
         
     | 
| 601 | 
         
            +
                "model.layers.53.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
         
     | 
| 602 | 
         
            +
                "model.layers.53.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 603 | 
         
            +
                "model.layers.53.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 604 | 
         
            +
                "model.layers.53.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
         
     | 
| 605 | 
         
            +
                "model.layers.53.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 606 | 
         
            +
                "model.layers.53.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
         
     | 
| 607 | 
         
            +
                "model.layers.53.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
         
     | 
| 608 | 
         
            +
                "model.layers.54.input_layernorm.weight": "model-00012-of-00014.safetensors",
         
     | 
| 609 | 
         
            +
                "model.layers.54.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 610 | 
         
            +
                "model.layers.54.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 611 | 
         
            +
                "model.layers.54.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 612 | 
         
            +
                "model.layers.54.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
         
     | 
| 613 | 
         
            +
                "model.layers.54.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
         
     | 
| 614 | 
         
            +
                "model.layers.54.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 615 | 
         
            +
                "model.layers.54.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 616 | 
         
            +
                "model.layers.54.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
         
     | 
| 617 | 
         
            +
                "model.layers.54.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 618 | 
         
            +
                "model.layers.54.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
         
     | 
| 619 | 
         
            +
                "model.layers.54.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 620 | 
         
            +
                "model.layers.55.input_layernorm.weight": "model-00012-of-00014.safetensors",
         
     | 
| 621 | 
         
            +
                "model.layers.55.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 622 | 
         
            +
                "model.layers.55.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 623 | 
         
            +
                "model.layers.55.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 624 | 
         
            +
                "model.layers.55.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
         
     | 
| 625 | 
         
            +
                "model.layers.55.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
         
     | 
| 626 | 
         
            +
                "model.layers.55.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 627 | 
         
            +
                "model.layers.55.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 628 | 
         
            +
                "model.layers.55.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
         
     | 
| 629 | 
         
            +
                "model.layers.55.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 630 | 
         
            +
                "model.layers.55.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
         
     | 
| 631 | 
         
            +
                "model.layers.55.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 632 | 
         
            +
                "model.layers.56.input_layernorm.weight": "model-00012-of-00014.safetensors",
         
     | 
| 633 | 
         
            +
                "model.layers.56.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 634 | 
         
            +
                "model.layers.56.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 635 | 
         
            +
                "model.layers.56.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 636 | 
         
            +
                "model.layers.56.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
         
     | 
| 637 | 
         
            +
                "model.layers.56.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
         
     | 
| 638 | 
         
            +
                "model.layers.56.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 639 | 
         
            +
                "model.layers.56.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 640 | 
         
            +
                "model.layers.56.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
         
     | 
| 641 | 
         
            +
                "model.layers.56.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 642 | 
         
            +
                "model.layers.56.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
         
     | 
| 643 | 
         
            +
                "model.layers.56.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 644 | 
         
            +
                "model.layers.57.input_layernorm.weight": "model-00012-of-00014.safetensors",
         
     | 
| 645 | 
         
            +
                "model.layers.57.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 646 | 
         
            +
                "model.layers.57.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 647 | 
         
            +
                "model.layers.57.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 648 | 
         
            +
                "model.layers.57.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
         
     | 
| 649 | 
         
            +
                "model.layers.57.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
         
     | 
| 650 | 
         
            +
                "model.layers.57.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 651 | 
         
            +
                "model.layers.57.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 652 | 
         
            +
                "model.layers.57.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
         
     | 
| 653 | 
         
            +
                "model.layers.57.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 654 | 
         
            +
                "model.layers.57.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
         
     | 
| 655 | 
         
            +
                "model.layers.57.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 656 | 
         
            +
                "model.layers.58.input_layernorm.weight": "model-00013-of-00014.safetensors",
         
     | 
| 657 | 
         
            +
                "model.layers.58.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 658 | 
         
            +
                "model.layers.58.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 659 | 
         
            +
                "model.layers.58.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 660 | 
         
            +
                "model.layers.58.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
         
     | 
| 661 | 
         
            +
                "model.layers.58.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
         
     | 
| 662 | 
         
            +
                "model.layers.58.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 663 | 
         
            +
                "model.layers.58.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 664 | 
         
            +
                "model.layers.58.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
         
     | 
| 665 | 
         
            +
                "model.layers.58.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 666 | 
         
            +
                "model.layers.58.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
         
     | 
| 667 | 
         
            +
                "model.layers.58.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
         
     | 
| 668 | 
         
            +
                "model.layers.59.input_layernorm.weight": "model-00013-of-00014.safetensors",
         
     | 
| 669 | 
         
            +
                "model.layers.59.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 670 | 
         
            +
                "model.layers.59.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 671 | 
         
            +
                "model.layers.59.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 672 | 
         
            +
                "model.layers.59.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
         
     | 
| 673 | 
         
            +
                "model.layers.59.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
         
     | 
| 674 | 
         
            +
                "model.layers.59.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 675 | 
         
            +
                "model.layers.59.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 676 | 
         
            +
                "model.layers.59.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
         
     | 
| 677 | 
         
            +
                "model.layers.59.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 678 | 
         
            +
                "model.layers.59.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
         
     | 
| 679 | 
         
            +
                "model.layers.59.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 680 | 
         
            +
                "model.layers.6.input_layernorm.weight": "model-00002-of-00014.safetensors",
         
     | 
| 681 | 
         
            +
                "model.layers.6.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 682 | 
         
            +
                "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 683 | 
         
            +
                "model.layers.6.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 684 | 
         
            +
                "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
         
     | 
| 685 | 
         
            +
                "model.layers.6.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
         
     | 
| 686 | 
         
            +
                "model.layers.6.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 687 | 
         
            +
                "model.layers.6.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 688 | 
         
            +
                "model.layers.6.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
         
     | 
| 689 | 
         
            +
                "model.layers.6.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 690 | 
         
            +
                "model.layers.6.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
         
     | 
| 691 | 
         
            +
                "model.layers.6.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 692 | 
         
            +
                "model.layers.60.input_layernorm.weight": "model-00013-of-00014.safetensors",
         
     | 
| 693 | 
         
            +
                "model.layers.60.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 694 | 
         
            +
                "model.layers.60.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 695 | 
         
            +
                "model.layers.60.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 696 | 
         
            +
                "model.layers.60.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
         
     | 
| 697 | 
         
            +
                "model.layers.60.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
         
     | 
| 698 | 
         
            +
                "model.layers.60.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 699 | 
         
            +
                "model.layers.60.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 700 | 
         
            +
                "model.layers.60.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
         
     | 
| 701 | 
         
            +
                "model.layers.60.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 702 | 
         
            +
                "model.layers.60.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
         
     | 
| 703 | 
         
            +
                "model.layers.60.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 704 | 
         
            +
                "model.layers.61.input_layernorm.weight": "model-00013-of-00014.safetensors",
         
     | 
| 705 | 
         
            +
                "model.layers.61.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 706 | 
         
            +
                "model.layers.61.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 707 | 
         
            +
                "model.layers.61.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 708 | 
         
            +
                "model.layers.61.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
         
     | 
| 709 | 
         
            +
                "model.layers.61.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
         
     | 
| 710 | 
         
            +
                "model.layers.61.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 711 | 
         
            +
                "model.layers.61.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 712 | 
         
            +
                "model.layers.61.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
         
     | 
| 713 | 
         
            +
                "model.layers.61.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 714 | 
         
            +
                "model.layers.61.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
         
     | 
| 715 | 
         
            +
                "model.layers.61.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 716 | 
         
            +
                "model.layers.62.input_layernorm.weight": "model-00013-of-00014.safetensors",
         
     | 
| 717 | 
         
            +
                "model.layers.62.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 718 | 
         
            +
                "model.layers.62.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 719 | 
         
            +
                "model.layers.62.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 720 | 
         
            +
                "model.layers.62.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
         
     | 
| 721 | 
         
            +
                "model.layers.62.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
         
     | 
| 722 | 
         
            +
                "model.layers.62.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 723 | 
         
            +
                "model.layers.62.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 724 | 
         
            +
                "model.layers.62.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
         
     | 
| 725 | 
         
            +
                "model.layers.62.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 726 | 
         
            +
                "model.layers.62.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
         
     | 
| 727 | 
         
            +
                "model.layers.62.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 728 | 
         
            +
                "model.layers.63.input_layernorm.weight": "model-00014-of-00014.safetensors",
         
     | 
| 729 | 
         
            +
                "model.layers.63.mlp.down_proj.weight": "model-00014-of-00014.safetensors",
         
     | 
| 730 | 
         
            +
                "model.layers.63.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 731 | 
         
            +
                "model.layers.63.mlp.up_proj.weight": "model-00014-of-00014.safetensors",
         
     | 
| 732 | 
         
            +
                "model.layers.63.post_attention_layernorm.weight": "model-00014-of-00014.safetensors",
         
     | 
| 733 | 
         
            +
                "model.layers.63.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
         
     | 
| 734 | 
         
            +
                "model.layers.63.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 735 | 
         
            +
                "model.layers.63.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 736 | 
         
            +
                "model.layers.63.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
         
     | 
| 737 | 
         
            +
                "model.layers.63.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 738 | 
         
            +
                "model.layers.63.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
         
     | 
| 739 | 
         
            +
                "model.layers.63.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
         
     | 
| 740 | 
         
            +
                "model.layers.7.input_layernorm.weight": "model-00002-of-00014.safetensors",
         
     | 
| 741 | 
         
            +
                "model.layers.7.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 742 | 
         
            +
                "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 743 | 
         
            +
                "model.layers.7.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 744 | 
         
            +
                "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
         
     | 
| 745 | 
         
            +
                "model.layers.7.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
         
     | 
| 746 | 
         
            +
                "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 747 | 
         
            +
                "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 748 | 
         
            +
                "model.layers.7.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
         
     | 
| 749 | 
         
            +
                "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 750 | 
         
            +
                "model.layers.7.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
         
     | 
| 751 | 
         
            +
                "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 752 | 
         
            +
                "model.layers.8.input_layernorm.weight": "model-00003-of-00014.safetensors",
         
     | 
| 753 | 
         
            +
                "model.layers.8.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
         
     | 
| 754 | 
         
            +
                "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 755 | 
         
            +
                "model.layers.8.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
         
     | 
| 756 | 
         
            +
                "model.layers.8.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
         
     | 
| 757 | 
         
            +
                "model.layers.8.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
         
     | 
| 758 | 
         
            +
                "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 759 | 
         
            +
                "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 760 | 
         
            +
                "model.layers.8.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
         
     | 
| 761 | 
         
            +
                "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 762 | 
         
            +
                "model.layers.8.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
         
     | 
| 763 | 
         
            +
                "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
         
     | 
| 764 | 
         
            +
                "model.layers.9.input_layernorm.weight": "model-00003-of-00014.safetensors",
         
     | 
| 765 | 
         
            +
                "model.layers.9.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
         
     | 
| 766 | 
         
            +
                "model.layers.9.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
         
     | 
| 767 | 
         
            +
                "model.layers.9.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
         
     | 
| 768 | 
         
            +
                "model.layers.9.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
         
     | 
| 769 | 
         
            +
                "model.layers.9.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
         
     | 
| 770 | 
         
            +
                "model.layers.9.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
         
     | 
| 771 | 
         
            +
                "model.layers.9.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
         
     | 
| 772 | 
         
            +
                "model.layers.9.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
         
     | 
| 773 | 
         
            +
                "model.layers.9.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
         
     | 
| 774 | 
         
            +
                "model.layers.9.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
         
     | 
| 775 | 
         
            +
                "model.layers.9.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
         
     | 
| 776 | 
         
            +
                "model.norm.weight": "model-00014-of-00014.safetensors"
         
     | 
| 777 | 
         
            +
              }
         
     | 
| 778 | 
         
            +
            }
         
     | 
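The `weight_map` above tells a loader which shard file holds each tensor. As a minimal sketch of how the index is consumed (assuming the repository files have been downloaded to a local folder; `repo_dir` and the tensor name are placeholders, not part of this upload):

# Minimal sketch: resolve a tensor to its shard via model.safetensors.index.json.
# "path/to/this/repo" is a placeholder for a local checkout of this upload.
import json
from safetensors import safe_open

repo_dir = "path/to/this/repo"
with open(f"{repo_dir}/model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.39.mlp.down_proj.weight"
shard = index["weight_map"][name]  # -> "model-00009-of-00014.safetensors"

with safe_open(f"{repo_dir}/{shard}", framework="pt") as st:
    tensor = st.get_tensor(name)   # reads only this tensor from the shard
print(shard, tuple(tensor.shape))

In ordinary use, `transformers` performs this shard resolution automatically when loading the model with `from_pretrained`.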
    	
special_tokens_map.json
ADDED

@@ -0,0 +1,31 @@
+{
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "eos_token": {
+    "content": "<|im_end|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
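These settings are picked up automatically when the tokenizer is loaded. A minimal sketch (the local path is a placeholder for a checkout of this upload):

# Minimal sketch: the eos/pad tokens defined above surface as tokenizer attributes.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/this/repo")  # placeholder path
print(tok.eos_token)  # "<|im_end|>"
print(tok.pad_token)  # "<|endoftext|>"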
tokenizer.json
ADDED

The diff for this file is too large to render. See raw diff.
tokenizer_config.json
ADDED

@@ -0,0 +1,239 @@
+{
+  "add_bos_token": false,
+  "add_prefix_space": false,
+  "added_tokens_decoder": {
+    "151643": {
+      "content": "<|endoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151644": {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151645": {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151646": {
+      "content": "<|object_ref_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151647": {
+      "content": "<|object_ref_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151648": {
+      "content": "<|box_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151649": {
+      "content": "<|box_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151650": {
+      "content": "<|quad_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151651": {
+      "content": "<|quad_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151652": {
+      "content": "<|vision_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151653": {
+      "content": "<|vision_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151654": {
+      "content": "<|vision_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151655": {
+      "content": "<|image_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151656": {
+      "content": "<|video_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151657": {
+      "content": "<tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151658": {
+      "content": "</tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151659": {
+      "content": "<|fim_prefix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151660": {
+      "content": "<|fim_middle|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151661": {
+      "content": "<|fim_suffix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151662": {
+      "content": "<|fim_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151663": {
+      "content": "<|repo_name|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151664": {
+      "content": "<|file_sep|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151665": {
+      "content": "<tool_response>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151666": {
+      "content": "</tool_response>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151667": {
+      "content": "<think>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151668": {
+      "content": "</think>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    }
+  },
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "bos_token": null,
+  "chat_template": "{%- if tools %}\n    {{- '<|im_start|>system\\n' }}\n    {%- if messages[0]['role'] == 'system' %}\n        {{- messages[0]['content'] }}\n    {%- else %}\n        {{- '' }}\n    {%- endif %}\n    {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n    {%- for tool in tools %}\n        {{- \"\\n\" }}\n        {{- tool | tojson }}\n    {%- endfor %}\n    {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n    {%- if messages[0]['role'] == 'system' %}\n        {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n  {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n    {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) %}\n        {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n    {%- elif message.role == \"assistant\" and not message.tool_calls %}\n        {%- set content = message.content %}\n        {%- if not loop.last %}\n            {%- set content = message.content.split('</think>')[-1].lstrip('\\n') %}\n        {%- endif %}\n        {{- '<|im_start|>' + message.role + '\\n' + content + '<|im_end|>' + '\\n' }}\n    {%- elif message.role == \"assistant\" %}\n        {%- set content = message.content %}\n        {%- if not loop.last %}\n            {%- set content = message.content.split('</think>')[-1].lstrip('\\n') %}\n        {%- endif %}\n        {{- '<|im_start|>' + message.role }}\n        {%- if message.content %}\n            {{- '\\n' + content }}\n        {%- endif %}\n        {%- for tool_call in message.tool_calls %}\n            {%- if tool_call.function is defined %}\n                {%- set tool_call = tool_call.function %}\n            {%- endif %}\n            {{- '\\n<tool_call>\\n{\"name\": \"' }}\n            {{- tool_call.name }}\n            {{- '\", \"arguments\": ' }}\n            {{- tool_call.arguments | tojson }}\n            {{- '}\\n</tool_call>' }}\n        {%- endfor %}\n        {{- '<|im_end|>\\n' }}\n    {%- elif message.role == \"tool\" %}\n        {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n            {{- '<|im_start|>user' }}\n        {%- endif %}\n        {{- '\\n<tool_response>\\n' }}\n        {{- message.content }}\n        {{- '\\n</tool_response>' }}\n        {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n            {{- '<|im_end|>\\n' }}\n        {%- endif %}\n    {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n    {{- '<|im_start|>assistant\\n<think>\\n' }}\n{%- endif %}\n",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "<|im_end|>",
+  "errors": "replace",
+  "model_max_length": 131072,
+  "pad_token": "<|endoftext|>",
+  "split_special_tokens": false,
+  "tokenizer_class": "Qwen2Tokenizer",
+  "unk_token": null
+}
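The chat_template above is a ChatML-style Jinja template: system, user, and assistant turns are wrapped in <|im_start|>/<|im_end|>, tool schemas are injected into the system block inside <tools></tools> tags, earlier <think> blocks are stripped from non-final assistant turns, and a <think> opener is appended when add_generation_prompt is set. A short sketch of rendering it, again assuming the hypothetical repo id your-org/this-model:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("your-org/this-model")  # hypothetical id

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]

# tokenize=False returns the rendered prompt string instead of input ids.
text = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(text)
# <|im_start|>system
# You are a helpful assistant.<|im_end|>
# <|im_start|>user
# Hello!<|im_end|>
# <|im_start|>assistant
# <think>
```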
    	
vocab.json
ADDED

The diff for this file is too large to render. See raw diff
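vocab.json holds the base vocabulary used by the slow Qwen2Tokenizer; the added tokens listed in tokenizer_config.json sit on top of it at ids 151643-151668 and round-trip through the usual id/token conversions. A quick sketch, same hypothetical repo id as above:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("your-org/this-model")  # hypothetical id

# Ids taken from added_tokens_decoder in tokenizer_config.json:
print(tok.convert_ids_to_tokens(151645))     # "<|im_end|>"
print(tok.convert_tokens_to_ids("<think>"))  # 151667
```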