added token use_auth_token=hf_token
app.py
CHANGED
@@ -18,7 +18,7 @@ else:
 # Load CodeLlama 3B Model
 model_name = "codellama/CodeLlama-3b"
 tokenizer = AutoTokenizer.from_pretrained(model_name)
-model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16, device_map="auto")
+model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16, device_map="auto", use_auth_token=hf_token)
 
 # Function to Generate Responses
 def generate_response(prompt):
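For context, below is a minimal sketch of how hf_token could be defined earlier in app.py; the diff does not show that part, so the secret name "HF_TOKEN" and the surrounding setup are assumptions, not the Space's actual code. On Hugging Face Spaces the token is typically stored as a repository secret and read from the environment before being passed to from_pretrained.

import os
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Assumed secret name; adjust to whatever the Space's settings actually use.
hf_token = os.environ.get("HF_TOKEN")
if hf_token is None:
    raise RuntimeError("HF_TOKEN is not set; gated model downloads will fail without it.")

model_name = "codellama/CodeLlama-3b"
# If the repository is gated, the tokenizer download needs the token as well,
# not only the model weights.
tokenizer = AutoTokenizer.from_pretrained(model_name, use_auth_token=hf_token)
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype=torch.float16,
    device_map="auto",
    use_auth_token=hf_token,
)

Note that recent versions of transformers deprecate the use_auth_token argument in favour of token=hf_token; the keyword shown here matches the one used in this commit.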