|
from transformers import AutoModelForSequenceClassification, AutoTokenizer
import torch
import gradio as gr
import numpy as np
|
# Class labels: index 0 = not grammatically acceptable, index 1 = acceptable
labels = ["Not Acceptable", "Acceptable"]

# Load the pre-trained grammar-acceptability classifier and its tokenizer from the Hugging Face Hub
model_name = "abdulmatinomotoso/English_Grammar_Checker"
model = AutoModelForSequenceClassification.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
|
def check_grammar(sentence):
    # Tokenize the sentence into a batch of one sequence of token ids
    input_tensor = tokenizer.encode(sentence, return_tensors="pt")

    # Inference only, so no gradient tracking is needed
    with torch.no_grad():
        logits = model(input_tensor).logits

    # Softmax over the two classes, then return the most probable label
    probs = torch.softmax(logits, dim=1)[0].cpu().numpy()
    max_index = np.argmax(probs)
    return labels[max_index]
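# Quick sanity check (hypothetical example sentence; the actual label depends on the downloaded model):
#   print(check_grammar("She go to school every day."))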
|
# Minimal Gradio UI: one text box in, the predicted label out
demo = gr.Interface(
    fn=check_grammar,
    inputs="text",
    outputs="text",
    title="English Grammar Checker",
)
|
if __name__ == "__main__":
    demo.launch(debug=True)
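# Note: launch() serves the app locally (http://127.0.0.1:7860 by default, assuming standard Gradio
# settings); pass share=True to demo.launch() if a temporary public link is needed.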