Update app.py
app.py
CHANGED
@@ -1,7 +1,7 @@
 import gradio as gr
 from fastapi import FastAPI
 from pydantic import BaseModel
-from transformers import
+from transformers import AutoTokenizer, AutoModelForTokenClassification
 import torch
 from threading import Thread
 import uvicorn
@@ -11,8 +11,9 @@ import requests
 app = FastAPI()
 
 # Load the model and the tokenizer
-
-
+model_name = "mdarhri00/named-entity-recognition"
+tokenizer = AutoTokenizer.from_pretrained(model_name)
+model = AutoModelForTokenClassification.from_pretrained(model_name)
 
 class TextInput(BaseModel):
     text: str
@@ -53,4 +54,4 @@ def predict_gradio(text):
     return entities
 
 demo = gr.Interface(fn=predict_gradio, inputs="text", outputs="json")
-demo.launch(share=True)
+demo.launch(share=True)
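The hunks above reference a predict_gradio function and a TextInput request model, but their bodies fall outside the changed lines and are not shown in this diff. As a rough sketch only, assuming the tokenizer and model objects created above and not the author's actual implementation, the NER inference behind predict_gradio could be wired up with the transformers token-classification pipeline; the pipeline call, aggregation_strategy="simple", and the score conversion below are assumptions introduced here.

from transformers import pipeline

# Sketch under assumptions: the real predict_gradio body is not part of this diff.
# Reuses the tokenizer/model loaded in the hunk above.
ner = pipeline(
    "token-classification",
    model=model,
    tokenizer=tokenizer,
    aggregation_strategy="simple",  # assumed: merge sub-word tokens into whole entities
)

def predict_gradio(text):
    # Cast numpy float32 scores to plain floats so the Gradio "json" output can serialize them
    return [{**entity, "score": float(entity["score"])} for entity in ner(text)]

With that wiring, the gr.Interface(fn=predict_gradio, inputs="text", outputs="json") line added at the end of the file would return a list of entity dicts (word, entity group, score, character offsets) for any text typed into the Gradio box.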