import gradio as gr
from transformers import T5ForConditionalGeneration, T5Tokenizer

# Load the model and tokenizer once at startup
model_name = "utrobinmv/t5_summary_en_ru_zh_base_2048"
model = T5ForConditionalGeneration.from_pretrained(model_name)
tokenizer = T5Tokenizer.from_pretrained(model_name)


def summarize_email(email_text, mode):
    # Map the UI mode to the task prefix the model expects
    prefix_map = {
        "Short Summary": "summary to en: ",
        "Detailed Summary": "summary big to en: ",
        "Brief Summary": "summary brief to en: "
    }
    prefix = prefix_map.get(mode, "summary to en: ")
    input_text = prefix + email_text

    # Tokenize (truncating overly long emails) and generate the summary
    inputs = tokenizer(input_text, return_tensors="pt", max_length=1024, truncation=True)
    outputs = model.generate(**inputs, max_new_tokens=200)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)


demo = gr.Interface(
    fn=summarize_email,
    inputs=[
        gr.Textbox(lines=12, placeholder="Paste your email text here...", label="Email Text"),
        gr.Radio(["Short Summary", "Detailed Summary", "Brief Summary"], label="Mode", value="Short Summary")
    ],
    outputs="text",
    title="Email Summarizer (T5)",
    description="Paste your email, select a summary mode, and get a concise version."
)

if __name__ == "__main__":
    demo.launch()