Spaces:
Build error
Build error
Create app.py
Browse files
app.py
ADDED
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import logging
import os

import gradio as gr
from gradio_huggingfacehub_search import HuggingfaceHubSearch
# BUG FIX: huggingface_hub.utils exposes no `ModelNotFoundError`; the
# importable class is `RepositoryNotFoundError`. The bogus name raised
# ImportError at module load (the Space's "Build error"). Aliased so the
# existing `except ModelNotFoundError` handler below keeps working.
from huggingface_hub.utils import RepositoryNotFoundError as ModelNotFoundError
from transformers import pipeline, Pipeline
from transformers.pipelines import PipelineException

# Set up logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Initialize Hugging Face Hub search component.
# Read the token from the environment rather than shipping a hard-coded
# placeholder; the literal fallback preserves the original behavior when
# HF_TOKEN is unset.
# NOTE(review): `search_in` is never wired into the UI below — confirm intent.
search_in = HuggingfaceHubSearch(
    api_key=os.environ.get("HF_TOKEN", "hf_YourAPITokenHere"),
    submit_on_select=True,
)
# Function to load the selected model and create a pipeline
def load_model(model_id):
    """Fetch *model_id* from the Hub and wrap it in a transformers pipeline.

    Returns the pipeline on success. On any failure (unknown model, pipeline
    construction error, or anything unexpected) the error is logged and None
    is returned instead of raising.
    """
    try:
        logger.info(f"Loading model: {model_id}")
        loaded_pipeline = pipeline(model=model_id)
        logger.info("Model loaded successfully.")
        return loaded_pipeline
    except ModelNotFoundError:
        # The Hub has no model under this id.
        logger.error(f"Model '{model_id}' not found.")
    except PipelineException as e:
        # Model exists but a pipeline could not be built for it.
        logger.error(f"Error creating pipeline: {e}")
    except Exception as e:
        # Last-resort guard so the UI never crashes on load.
        logger.error(f"Unexpected error: {e}")
    return None
# Function to process input data using the loaded pipeline
def process_input(model_pipeline, input_data):
    """Run *input_data* through *model_pipeline* and return the result.

    Any exception raised by the pipeline call is logged and swallowed,
    yielding None so the caller can surface a friendly failure message.
    """
    try:
        logger.info("Processing input data.")
        result = model_pipeline(input_data)
        logger.info("Processing complete.")
        return result
    except Exception as e:
        logger.error(f"Error during processing: {e}")
        return None
# Gradio interface setup
def create_interface():
    """Build and return the Gradio Blocks demo.

    Returns:
        gr.Blocks: the assembled UI, ready for `.launch()`.

    BUG FIX: the original wiring could never work in Gradio —
    (1) `component.value` inside a callback reads the build-time value,
        not what the user typed; user input must flow in via `inputs=`;
    (2) registering `process_button.click(...)` inside another callback,
        after the Blocks context has closed, has no effect;
    (3) assigning `output_data.value` in a callback does not update the UI.
    The loaded pipeline is now kept in a per-session `gr.State`, and both
    events are registered inside the Blocks context with explicit
    `inputs`/`outputs`.
    """
    with gr.Blocks() as demo:
        gr.Markdown("# Transformers Pipeline Playground")
        model_id = gr.Textbox(label="Enter Model ID from Hugging Face Hub")
        input_data = gr.Textbox(label="Input Data")
        output_data = gr.Textbox(label="Output Data")
        load_button = gr.Button("Load Model")
        process_button = gr.Button("Process Input")
        # Per-session holder for the loaded pipeline between clicks.
        pipeline_state = gr.State(value=None)

        def on_load_click(current_model_id):
            # Load the requested model; stash the pipeline in state and
            # report the outcome in the output box.
            model_pipeline = load_model(current_model_id)
            if model_pipeline is None:
                return None, "Failed to load model."
            return model_pipeline, "Model loaded successfully."

        def on_process_click(model_pipeline, data):
            # Run the stored pipeline on the user's input.
            if model_pipeline is None:
                return "Failed to load model."
            result = process_input(model_pipeline, data)
            return "Error during processing." if result is None else str(result)

        load_button.click(
            on_load_click,
            inputs=[model_id],
            outputs=[pipeline_state, output_data],
        )
        process_button.click(
            on_process_click,
            inputs=[pipeline_state, input_data],
            outputs=[output_data],
        )

    return demo
# Run the Gradio interface
if __name__ == "__main__":
    # Build the UI and start the local server.
    demo = create_interface()
    demo.launch()