Update app.py
app.py CHANGED
@@ -18,8 +18,9 @@ device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 def get_model():
   #tokenizer = AutoTokenizer.from_pretrained("BigSalmon/MASKGPT2")
   #model = AutoModelForCausalLM.from_pretrained("BigSalmon/MASKGPT2")
-  tokenizer = GPTNeoXTokenizerFast.from_pretrained("CarperAI/FIM-NeoX-1.3B")
+  #tokenizer = GPTNeoXTokenizerFast.from_pretrained("CarperAI/FIM-NeoX-1.3B")
   #model = GPTNeoXForCausalLM.from_pretrained("BigSalmon/FormalInformalConcise-FIM-NeoX-1.3B")
+  tokenizer = AutoTokenizer.from_pretrained("xhyi/PT_GPTNEO350_ATG")
   model = AutoModelForCausalLM.from_pretrained("BigSalmon/ConvertLowercaseToUppercase3")
   return model, tokenizer
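For reference, a minimal sketch of how the get_model() pair might be exercised after this commit, assuming the app drives it through the standard transformers generation API; the prompt string, device handling, and sampling settings below are illustrative placeholders, not part of the commit itself.

# Sketch only: reproduces get_model() as it stands after this commit and
# runs one generation pass. Prompt and generation settings are placeholders.
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

def get_model():
    # Tokenizer and model repos as set by this commit.
    tokenizer = AutoTokenizer.from_pretrained("xhyi/PT_GPTNEO350_ATG")
    model = AutoModelForCausalLM.from_pretrained("BigSalmon/ConvertLowercaseToUppercase3")
    return model, tokenizer

model, tokenizer = get_model()
model.to(device)
model.eval()

prompt = "your text here"  # placeholder input, not from the source
inputs = tokenizer(prompt, return_tensors="pt").to(device)
with torch.no_grad():
    output_ids = model.generate(**inputs, max_new_tokens=40, do_sample=True, top_p=0.95)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))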