remove llama2 tokenizer with token
app.py
CHANGED
@@ -6,7 +6,7 @@ from transformers import AutoTokenizer
 
 client = InferenceClient(model="https://1b66-141-3-25-29.ngrok-free.app")
 #client = InferenceClient(model="http://i13hpc68:8056")
-tokenizer = AutoTokenizer.from_pretrained("
+tokenizer = AutoTokenizer.from_pretrained("enoch/llama-65b-hf", padding_side='left',return_token_type_ids=False)
 
 
 def inference(message):
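For context, here is a minimal sketch of how the repointed tokenizer and the ngrok-hosted endpoint could fit together inside inference(message). The body of inference is not part of this diff, so the prompt truncation, the max_new_tokens value, and the streaming loop below are illustrative assumptions rather than the Space's actual code.

```python
from huggingface_hub import InferenceClient
from transformers import AutoTokenizer

# Endpoint and tokenizer as configured in app.py after this commit.
client = InferenceClient(model="https://1b66-141-3-25-29.ngrok-free.app")
tokenizer = AutoTokenizer.from_pretrained(
    "enoch/llama-65b-hf",
    padding_side="left",          # left padding is the usual setting for decoder-only (LLaMA-style) models when batching
    return_token_type_ids=False,  # LLaMA-style tokenizers do not use token_type_ids
)

def inference(message):
    # Hypothetical body: clip the user message to a fixed token budget,
    # then stream a completion from the remote text-generation endpoint.
    input_ids = tokenizer(message, truncation=True, max_length=1024)["input_ids"]
    prompt = tokenizer.decode(input_ids, skip_special_tokens=True)
    partial = ""
    for token in client.text_generation(prompt, max_new_tokens=256, stream=True):
        partial += token
        yield partial
```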