Update app.py
app.py CHANGED
@@ -1,4 +1,4 @@
-from transformers import AutoTokenizer, AutoModelForCausalLM
+"""from transformers import AutoTokenizer, AutoModelForCausalLM
 import gradio as gr
 import torch
 
@@ -31,3 +31,19 @@ iface = gr.Interface(
 )
 
 iface.launch()
+"""
+from transformers import pipeline
+import gradio as gr
+
+# Load a pre-trained model (using mBART here for multilingual text generation)
+model = pipeline("text2text-generation", model="facebook/mbart-large-50-many-to-one-mmt")
+
+def convert_to_casual_hindi(text):
+    # Use the model to transform the text dynamically
+    # We assume the model will handle Hindi text and convert it
+    transformed_text = model(text)
+    return transformed_text[0]['generated_text']
+
+# Gradio interface for deployment in Hugging Face Spaces
+iface = gr.Interface(fn=convert_to_casual_hindi, inputs="text", outputs="text", title="Formal to Casual Hindi Converter")
+iface.launch()
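For a quick local check of the new logic in app.py, a minimal sketch along these lines can exercise the same pipeline call outside of Gradio. It assumes the transformers and torch packages are installed and the checkpoint downloads successfully; note that facebook/mbart-large-50-many-to-one-mmt is a many-to-one machine-translation checkpoint, so for Hindi input it may return an English translation rather than a casual Hindi rewrite. The sample sentence below is a hypothetical placeholder.

from transformers import pipeline

# Same task and checkpoint as app.py; the first run downloads the model weights,
# which can take a while.
model = pipeline("text2text-generation", model="facebook/mbart-large-50-many-to-one-mmt")

def convert_to_casual_hindi(text):
    # Mirror the Space's function: run the model and return the generated text.
    result = model(text)
    return result[0]["generated_text"]

if __name__ == "__main__":
    # Hypothetical formal Hindi sentence used as a smoke test.
    sample = "कृपया अपना नाम बताइए।"
    print(convert_to_casual_hindi(sample))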