osmankoc committed on
Commit 53b7ea9 · 1 Parent(s): dbea975

revert and fix

Files changed (1)
  1. app.py +6 -4
app.py CHANGED
@@ -5,13 +5,15 @@ import torch
 
 MODEL_NAME = "osmankoc/llama-2-7b-zoa"
 
+# Preload the model and tokenizer
+tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
+model = AutoModelForCausalLM.from_pretrained(
+    MODEL_NAME, torch_dtype=torch.float16, device_map="auto"
+)
+
 # For ZeroGPU, the model is loaded onto the GPU only when needed
 @spaces.GPU
 def generate(prompt):
-    tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
-    model = AutoModelForCausalLM.from_pretrained(
-        MODEL_NAME, torch_dtype=torch.float16, device_map="auto"
-    )
     inputs = tokenizer(prompt, return_tensors="pt").to("cuda")
     output = model.generate(**inputs, max_length=500)
     response = tokenizer.decode(output[0], skip_special_tokens=True)
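
For context, a minimal sketch of what the full app.py might look like after this commit is shown below. Only the model loading and the body of generate() are confirmed by the diff; the imports, the return statement, and the Gradio wiring are assumptions added for illustration.

# Sketch of app.py after this commit (imports and UI wiring are assumed, not shown in the diff)
import spaces
import torch
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

MODEL_NAME = "osmankoc/llama-2-7b-zoa"

# Preload the model and tokenizer once at startup (the change made by this commit)
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(
    MODEL_NAME, torch_dtype=torch.float16, device_map="auto"
)

# For ZeroGPU, the GPU is attached only while this decorated function runs
@spaces.GPU
def generate(prompt):
    inputs = tokenizer(prompt, return_tensors="pt").to("cuda")
    output = model.generate(**inputs, max_length=500)
    response = tokenizer.decode(output[0], skip_special_tokens=True)
    return response  # assumed: the diff does not show the return

# Assumed UI wiring, not part of the diff
demo = gr.Interface(fn=generate, inputs="text", outputs="text")
demo.launch()

Moving the from_pretrained calls to module level means the weights are loaded once when the Space starts, so each @spaces.GPU call only pays for inference instead of reloading the 7B model on every request.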