Emmanuel Frimpong Asante committed on
Commit e69e187 · 1 Parent(s): d24eed5

"Update space"


Signed-off-by: Emmanuel Frimpong Asante <[email protected]>

Files changed (1)
  1. app.py +6 -6
app.py CHANGED
@@ -7,21 +7,21 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
 from huggingface_hub import login
 import os
 
+# Ensure the HF token is set
 tok = os.getenv('HF_Token')
 login(token=tok)
 
-# Load models
+# Load Keras models
 my_model = load_model('models/Final_Chicken_disease_model.h5', compile=True)
 auth_model = load_model('models/auth_model.h5', compile=True)
 
-# Initialize the tokenizer
+# Load the tokenizer and model, ensuring they run on CPU
 llama_tokenizer = AutoTokenizer.from_pretrained('meta-llama/Meta-Llama-3-8B-Instruct')
-
-# Initialize the LLaMA model and force CPU usage
 llama_model = AutoModelForCausalLM.from_pretrained(
     'meta-llama/Meta-Llama-3-8B-Instruct',
-    device_map={"": "cpu"}  # Force the model to load on the CPU
-)
+    device_map=None,            # Do not use device_map, force CPU
+    torch_dtype=torch.float32   # Ensure model uses float32 for compatibility
+).to('cpu')                     # Force the model to CPU
 
 # Dictionaries for disease names, results, and recommendations
 name_disease = {0: 'Coccidiosis', 1: 'Healthy', 2: 'New Castle Disease', 3: 'Salmonella'}
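
For context, a minimal, self-contained sketch of how the CPU-pinned model configured by this commit could be exercised. The loading lines mirror the patched app.py (no device_map, float32 weights, explicit .to('cpu')); the top-level import of torch, the example prompt, and the generation parameters are illustrative assumptions and are not part of this commit.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the tokenizer and model the same way the patched app.py does:
# no device_map, float32 weights, explicitly moved to the CPU.
llama_tokenizer = AutoTokenizer.from_pretrained('meta-llama/Meta-Llama-3-8B-Instruct')
llama_model = AutoModelForCausalLM.from_pretrained(
    'meta-llama/Meta-Llama-3-8B-Instruct',
    device_map=None,
    torch_dtype=torch.float32
).to('cpu')

# Illustrative usage (assumed, not in the commit): tokenize a prompt,
# generate on the CPU, and decode the completion.
prompt = "List common symptoms of coccidiosis in chickens."
inputs = llama_tokenizer(prompt, return_tensors='pt').to('cpu')
with torch.no_grad():
    output_ids = llama_model.generate(**inputs, max_new_tokens=128)
print(llama_tokenizer.decode(output_ids[0], skip_special_tokens=True))

Loading with device_map=None and then calling .to('cpu') keeps every weight on the CPU, and torch.float32 avoids half-precision kernels that may be unsupported or slow on CPU-only Space hardware.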