Update README.md
    	
README.md (changed):

    @@ -192,7 +192,7 @@ from webscout.Local.samplers import SamplerSettings
     # Download the model
     repo_id = "OEvortex/HelpingAI2-9B"
     filename = "helpingai2-9b-q5_0.gguf"
    -model_path = download_model(repo_id, filename, token=
    +model_path = download_model(repo_id, filename, token=None)
     
     # Load the model
     model = Model(model_path, n_gpu_layers=40)
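The corrected line passes `token=None`, i.e. an anonymous Hugging Face download, which is enough as long as the GGUF repo is public; a token is only needed for gated or private repos. For readers who want to see roughly what the download and load steps amount to without webscout's helpers, here is a minimal sketch using `huggingface_hub` and `llama-cpp-python` directly (an assumption for illustration only; the diff itself shows webscout's `download_model` and `Model` wrappers, not their internals):

```python
# Minimal sketch of the "download" and "load" steps with the underlying
# libraries. Assumption: webscout.Local builds on llama.cpp-style loading,
# as the n_gpu_layers argument suggests; the diff does not confirm this.
from huggingface_hub import hf_hub_download
from llama_cpp import Llama

# token=None performs an anonymous download; pass an HF token for gated
# or private repositories.
model_path = hf_hub_download(
    repo_id="OEvortex/HelpingAI2-9B",
    filename="helpingai2-9b-q5_0.gguf",
    token=None,
)

# n_gpu_layers=40 offloads 40 transformer layers to the GPU; use 0 for
# CPU-only inference or -1 to offload every layer that fits.
llm = Llama(model_path=model_path, n_gpu_layers=40, n_ctx=4096)
```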
    @@ -202,7 +202,7 @@ system_prompt = "You are HelpingAI, an emotional AI. Always answer my questions
     
     # Create a chat format with your system prompt
     helpingai = formats.llama3.copy()
    -helpingai['
    +helpingai['system_prompt'] = system_prompt
     
     # Define your sampler settings (optional)
     sampler = SamplerSettings(temp=0.7, top_p=0.9)
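The second fix completes the format override: `formats.llama3.copy()` clones webscout's built-in Llama 3 chat format, and writing the custom system prompt into the `'system_prompt'` key of that copy makes every conversation open with the HelpingAI persona instead of the default system message. Only the `'system_prompt'` key is visible in this diff; the sketch below is a hypothetical illustration of what a Llama-3-style template ultimately does with that value (the special tokens follow the public Llama 3 chat template, not webscout's internal dict):

```python
# Hypothetical illustration of how a Llama-3-style template combines a system
# prompt with a user message. webscout's formats.llama3 presumably encodes an
# equivalent layout; only its 'system_prompt' key is confirmed by the diff.
system_prompt = "You are HelpingAI, an emotional AI. Always answer my questions ..."

def build_llama3_prompt(user_message: str) -> str:
    return (
        "<|begin_of_text|>"
        "<|start_header_id|>system<|end_header_id|>\n\n"
        f"{system_prompt}<|eot_id|>"
        "<|start_header_id|>user<|end_header_id|>\n\n"
        f"{user_message}<|eot_id|>"
        "<|start_header_id|>assistant<|end_header_id|>\n\n"
    )

print(build_llama3_prompt("How are you feeling today?"))
```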
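`SamplerSettings(temp=0.7, top_p=0.9)` is unchanged context in this hunk: a temperature of 0.7 mildly sharpens the next-token distribution, and top-p 0.9 restricts sampling to the smallest set of candidates covering 90% of the probability mass. As a rough, illustrative sketch of what those two knobs mean (not webscout's actual sampler implementation):

```python
import numpy as np

def sample_token(logits: np.ndarray, temp: float = 0.7, top_p: float = 0.9) -> int:
    """Illustrative temperature + nucleus (top-p) sampling over raw logits."""
    # Temperature: dividing logits by temp < 1 sharpens the distribution,
    # while temp > 1 flattens it.
    scaled = logits / temp
    # Softmax (shift by the max for numerical stability).
    probs = np.exp(scaled - scaled.max())
    probs /= probs.sum()
    # Nucleus filter: keep the smallest set of top tokens whose cumulative
    # probability reaches top_p, then renormalise over that set.
    order = np.argsort(probs)[::-1]
    cutoff = int(np.searchsorted(np.cumsum(probs[order]), top_p)) + 1
    keep = order[:cutoff]
    kept = probs[keep] / probs[keep].sum()
    return int(np.random.choice(keep, p=kept))

# Example: draw one token id from a toy 5-token vocabulary.
next_id = sample_token(np.array([2.0, 1.0, 0.5, 0.1, -1.0]))
```

Lower `temp` and lower `top_p` both make the output more deterministic; raising either one increases variety.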