from transformers import AutoTokenizer, TFAutoModelForSeq2SeqLM
import gradio as gr

checkpoint = "Hemanth-thunder/english-tamil-mt"

# Load the tokenizer and the locally fine-tuned model once at startup
# rather than on every request.
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = TFAutoModelForSeq2SeqLM.from_pretrained("finetune-EN-to-Ta/")


def language_translator(text):
    """Translate an English sentence into Tamil."""
    tokenized = tokenizer([text], return_tensors="np")
    out = model.generate(**tokenized, max_length=128)
    with tokenizer.as_target_tokenizer():
        return tokenizer.decode(out[0], skip_special_tokens=True)


examples = [
    ["Hello, how are you today?"],
    ["Translate this sentence into another language."],
    ["how to play a game"],
]

demo = gr.Interface(fn=language_translator, inputs="text", outputs="text",
                    title="English To Tamil Translator", examples=examples)
demo.launch(debug=True, share=True)