Update app.py
app.py
CHANGED
@@ -7,9 +7,9 @@ title = "🤖AI ChatBot"
 description = "A State-of-the-Art Large-scale Pretrained Response generation model (DialoGPT)"
 examples = [["How are you?"]]
 
-tokenizer = AutoTokenizer.from_pretrained("
-model = AutoModelForCausalLM.from_pretrained("
-
+tokenizer = AutoTokenizer.from_pretrained("models/google/gemma-2-2b-it")
+model = AutoModelForCausalLM.from_pretrained("models/google/gemma-2-2b-it", torch_dtype=torch.float16)
+#stvlynn/Gemma-2-2b-Chinese-it
 #tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-large")
 #model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-large")
 #The model was loaded with use_flash_attention_2=True, which is deprecated and may be removed in a future release. Please use `attn_implementation="flash_attention_2"` instead.
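For context, a minimal sketch of what the updated loading section of app.py could look like with its imports in place. The surrounding code is not part of this diff, so the imports below are assumptions; the "models/google/gemma-2-2b-it" path is kept exactly as it appears in the commit (the corresponding Hub ID would be google/gemma-2-2b-it), and the commented attn_implementation line only reflects the deprecation notice quoted above, not code known to be in the Space.

# Minimal sketch; the imports are assumed, since they are not shown in the diff above.
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Path kept from the commit; if loading from the Hub instead, the ID would be "google/gemma-2-2b-it".
tokenizer = AutoTokenizer.from_pretrained("models/google/gemma-2-2b-it")
model = AutoModelForCausalLM.from_pretrained(
    "models/google/gemma-2-2b-it",
    torch_dtype=torch.float16,
    # Per the deprecation notice quoted in the comments, FlashAttention-2 (if installed
    # and desired) would now be requested like this rather than with use_flash_attention_2=True:
    # attn_implementation="flash_attention_2",
)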