# from transformers import pipeline
import gradio as gr
from pythainlp.tokenize import subword_tokenize

# model = pipeline(
#     "summarization",
# )


def predict(prompt):
    # summary = model(prompt)[0]["summary_text"]
    # Tokenize the input text into Thai subwords using the TCC
    # (Thai Character Cluster) engine; this returns a list of strings.
    summary = subword_tokenize(prompt, engine="tcc")
    # Join the subword list into a single string for the text output.
    return " ".join(summary)


# Create an interface for the model and launch it.
interface = gr.Interface(fn=predict, inputs="textbox", outputs="text")
interface.launch()