Ethan Chang committed
		
Commit 8ea200f · Parent(s): 6a3dbe6
Removed minor print statement; logging is possible but unimplemented for the current solution
code/modules/chat/chat_model_loader.py  CHANGED
@@ -32,7 +32,6 @@ class ChatModelLoader:
         elif self.config["llm_params"]["llm_loader"] == "local_llm":
             n_batch = 512  # Should be between 1 and n_ctx, consider the amount of VRAM in your GPU.
             model_path = self._verify_model_cache(self.config["llm_params"]["local_llm_params"]["model"])
-            print(model_path)
             llm = LlamaCpp(
                 model_path=model_path,
                 n_batch=n_batch,
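The commit message notes that logging is possible but unimplemented here. A minimal sketch of what replacing the removed print with Python's standard logging module could look like (the logger name, level, and example path below are assumptions for illustration, not code from this repository):

    import logging

    # Module-level logger; the name and basicConfig call are illustrative assumptions.
    logger = logging.getLogger(__name__)
    logging.basicConfig(level=logging.DEBUG)

    # Placeholder value standing in for the result of self._verify_model_cache(...).
    model_path = "models/local-model.gguf"

    # Debug-level log entry instead of the removed print(model_path).
    logger.debug("Resolved local model path: %s", model_path)

Keeping the message at debug level would preserve the diagnostic value of the old print without adding noise to normal runs.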
