# tokenizer / app.py
# Author: Furkan Akkurt
import json

import gradio as gr

import bap_preprocessing
def tokenize(data):
    """Tokenize the given text and return the tokens.

    Accepts either a JSON payload of the form ``{"text": "..."}`` or a
    plain text string (the Gradio ``examples`` pass plain strings, which
    are not valid JSON objects and would otherwise raise
    ``json.JSONDecodeError``).

    Args:
        data: A JSON string with a ``"text"`` key, or raw text to tokenize.

    Returns:
        dict: ``{"tokens": <tokenizer output>}`` where the value is
        whatever ``bap_preprocessing.tokenize`` returns for the text.
    """
    try:
        payload = json.loads(data)
        # Only a JSON object with a "text" key matches the original
        # contract; any other JSON value is treated as raw text input.
        if isinstance(payload, dict) and 'text' in payload:
            text = payload['text']
        else:
            text = data
    except (json.JSONDecodeError, TypeError):
        # Plain (non-JSON) input, e.g. the bundled examples — use as-is.
        text = data
    response = bap_preprocessing.tokenize(text)
    result = { "tokens": response }
    return result
# Sample sentences shown in the UI; they are fed to tokenize() as-is.
example_sentences = [
    "Ben oraya geliyorum.",
    "Sen neden gelmiyorsun?",
]

# Wire the tokenizer into a simple text-in / text-out Gradio interface.
demo = gr.Interface(
    fn=tokenize,
    inputs="text",
    outputs="text",
    title="Tokenizer",
    examples=example_sentences,
)

demo.launch()