"""Translate a fixed English sentence to Tamil with a Hugging Face MT model.

Loads the dedicated English->Tamil model "Hemanth-thunder/english-tamil-mt",
translates one hard-coded sentence, and prints the input and output wrapped
to 80 columns.
"""

import textwrap

from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

MODEL_NAME = "Hemanth-thunder/english-tamil-mt"
WRAP_WIDTH = 80


def main() -> None:
    """Run the translation demo and print the wrapped input/output text."""
    tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
    model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)

    text_to_translate = (
        "Hello, Universe! I am a test translation tool created with the help "
        "of a custom Large Language Model (LLM)."
    )
    model_inputs = tokenizer(text_to_translate, return_tensors="pt")

    # Translate to Tamil. This is a single-direction (en->ta) model, so no
    # forced_bos_token_id is needed. The original code called
    # tokenizer.get_lang_id("hi"), which (a) is an M2M100-only tokenizer
    # method and raises AttributeError here, and (b) "hi" is Hindi, not
    # Tamil — both wrong for this model.
    gen_tokens = model.generate(**model_inputs)
    translated_text = "".join(
        tokenizer.batch_decode(gen_tokens, skip_special_tokens=True)
    )

    input_text = textwrap.fill(text_to_translate, width=WRAP_WIDTH)
    wrapped_output = textwrap.fill(translated_text, width=WRAP_WIDTH)
    print(f"Input Text: {input_text}")
    print(f"Output Text: {wrapped_output}")


if __name__ == "__main__":
    main()