|
|
|
import streamlit as st |
|
from transformers import T5Tokenizer, T5ForConditionalGeneration |
|
|
|
# Streamlit UI: send a free-form prompt through a T5 text-to-text model
# and display the generated continuation.

st.title("Text-to-Text AI Model")

st.write("Enter your prompt below:")

prompt = st.text_input("Prompt")


@st.cache_resource
def _load_t5():
    """Load the t5-small tokenizer and model exactly once per process.

    Streamlit re-runs this whole script on every widget interaction;
    without caching, the ~240 MB model was reloaded on each rerun.
    ``st.cache_resource`` keeps one shared instance across reruns/sessions.
    """
    tokenizer = T5Tokenizer.from_pretrained('t5-small')
    model = T5ForConditionalGeneration.from_pretrained('t5-small')
    return tokenizer, model


if prompt:
    tokenizer, model = _load_t5()

    # Tokenize to PyTorch tensors, generate with the model's defaults,
    # then decode the first (and only) returned sequence.
    input_ids = tokenizer.encode(prompt, return_tensors='pt')
    output = model.generate(input_ids)
    output_text = tokenizer.decode(output[0], skip_special_tokens=True)

    st.write("Output:")
    st.write(output_text)
|
|