Eiad Gomaa committed · Commit 403eecc · 1 Parent(s): 960b05f

Update app.py

Files changed (1): app.py (+2, −2)
app.py CHANGED
@@ -6,8 +6,8 @@ import torch
 def load_model():
     """Load model and tokenizer with caching"""
     try:
-        tokenizer = AutoTokenizer.from_pretrained("NousResearch/Hermes-3-Llama-3.1-8B")
-        model = AutoModelForCausalLM.from_pretrained("NousResearch/Hermes-3-Llama-3.1-8B")
+        tokenizer = AutoTokenizer.from_pretrained("NousResearch/Llama-3.2-1B")
+        model = AutoModelForCausalLM.from_pretrained("NousResearch/Llama-3.2-1B")
         return model, tokenizer
     except Exception as e:
         st.error(f"Error loading model: {str(e)}")
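The commit swaps the 8B Hermes checkpoint for the much smaller Llama-3.2-1B, presumably to make the model cheaper to download and load. For context, here is a minimal sketch of how the updated load_model might be wired into the Streamlit app; everything outside the diff (the st.cache_resource decorator implied by the "with caching" docstring, the None fallback, and the prompt/generate UI) is an assumption, not part of this commit.

# Hypothetical surrounding app.py; only load_model's body matches the diff.
import streamlit as st
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

@st.cache_resource  # assumed: cache the model across Streamlit reruns, as the docstring suggests
def load_model():
    """Load model and tokenizer with caching"""
    try:
        tokenizer = AutoTokenizer.from_pretrained("NousResearch/Llama-3.2-1B")
        model = AutoModelForCausalLM.from_pretrained("NousResearch/Llama-3.2-1B")
        return model, tokenizer
    except Exception as e:
        st.error(f"Error loading model: {str(e)}")
        return None, None  # assumed fallback so callers can detect a failed load

model, tokenizer = load_model()
prompt = st.text_input("Prompt")  # assumed UI element, not shown in the diff
if model is not None and prompt:
    inputs = tokenizer(prompt, return_tensors="pt")
    with torch.no_grad():
        output_ids = model.generate(**inputs, max_new_tokens=100)
    st.write(tokenizer.decode(output_ids[0], skip_special_tokens=True))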