Upload folder using huggingface_hub
- README.md +1 -1
- config.json +0 -3
README.md CHANGED

@@ -25,7 +25,7 @@ base_model:
 ---
 
 # EZCon/SmolVLM2-2.2B-Instruct-8bit-mlx
-This model was converted to MLX format from [`HuggingFaceTB/SmolVLM2-2.2B-Instruct`]() using mlx-vlm version **0.3.
+This model was converted to MLX format from [`HuggingFaceTB/SmolVLM2-2.2B-Instruct`]() using mlx-vlm version **0.3.4**.
 Refer to the [original model card](https://huggingface.co/HuggingFaceTB/SmolVLM2-2.2B-Instruct) for more details on the model.
 ## Use with mlx
 
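The README's `## Use with mlx` section is not expanded in this diff. As a minimal sketch of how the converted checkpoint is typically run, assuming the standard mlx-vlm Python pattern: only the repo id comes from the README above, while the image path and prompt are placeholders, and the snippet actually shipped in the README may differ.

```python
# Minimal usage sketch, assuming the standard mlx-vlm Python API.
# Only the repo id comes from the README; the image path and prompt are placeholders.
from mlx_vlm import load, generate
from mlx_vlm.prompt_utils import apply_chat_template
from mlx_vlm.utils import load_config

model_path = "EZCon/SmolVLM2-2.2B-Instruct-8bit-mlx"
model, processor = load(model_path)   # fetches and loads the 8-bit MLX weights
config = load_config(model_path)

images = ["example.jpg"]              # placeholder image path
prompt = "Describe this image."

# Wrap the prompt in the model's chat template before generating.
formatted_prompt = apply_chat_template(processor, config, prompt, num_images=len(images))
output = generate(model, processor, formatted_prompt, images, verbose=False)
print(output)
```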
    	
config.json CHANGED

@@ -44,7 +44,6 @@
     "pad_token_id": 128002,
     "prefix": null,
     "problem_type": null,
-    "pruned_heads": {},
     "quantization": {
         "group_size": 64,
         "bits": 8,
@@ -87,7 +86,6 @@
         "output_hidden_states": false,
         "torchscript": false,
         "dtype": "bfloat16",
-        "pruned_heads": {},
         "tie_word_embeddings": false,
         "chunk_size_feed_forward": 0,
         "is_encoder_decoder": false,
@@ -244,7 +242,6 @@
         "output_hidden_states": false,
         "torchscript": false,
         "dtype": null,
-        "pruned_heads": {},
         "tie_word_embeddings": false,
         "chunk_size_feed_forward": 0,
         "is_encoder_decoder": false,
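All three config.json hunks do the same thing: they drop the empty `"pruned_heads": {}` entries that the converter had left in the config and its nested sub-configs, while keeping the 8-bit quantization block (`group_size` 64, `bits` 8) unchanged. A minimal sketch for checking this on a local copy of the file; the path is an assumption, and only the key names and values come from the diff above.

```python
# Minimal check sketch: the file path is an assumption; the key names come from the diff.
import json

def find_key(node, key, path="$"):
    """Recursively yield (path, value) for every occurrence of `key` in nested JSON."""
    if isinstance(node, dict):
        for k, v in node.items():
            p = f"{path}.{k}"
            if k == key:
                yield p, v
            yield from find_key(v, key, p)
    elif isinstance(node, list):
        for i, v in enumerate(node):
            yield from find_key(v, key, f"{path}[{i}]")

with open("config.json") as f:
    cfg = json.load(f)

# 8-bit weights with one scale per group of 64 values, as shown in the first hunk.
print(list(find_key(cfg, "quantization")))

# After this commit there should be no "pruned_heads" entries left anywhere.
print(list(find_key(cfg, "pruned_heads")))   # expected: []
```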
