# GWENCAK / app.py
import gradio as gr
from transformers import pipeline
# Load the model as a text-classification pipeline.
# trust_remote_code=True is required because the repository ships custom
# modeling code (Qwen2RMForSequenceClassification).
pipe = pipeline("text-classification", model="Qwen/Qwen2.5-Math-PRM-72B", trust_remote_code=True)
# Define a function to generate responses
def generate_response(prompt):
    output = pipe(prompt)
    return output[0]['label']
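# Sketch of the expected pipeline output (an assumption based on the standard
# text-classification pipeline format; exact label names depend on the model's config):
#     pipe("1 + 1 = 2")  ->  [{'label': 'LABEL_0', 'score': 0.97}]
# generate_response returns only the predicted label string.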
# Create a Gradio interface
iface = gr.Interface(
    fn=generate_response,
    inputs="text",
    outputs="text",
    title="Qwen2.5 Math Model",
    description="Ask a math question and get an answer!"
)
# Launch the interface
iface.launch()
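# Note: on a Hugging Face Space this file runs automatically; locally the app
# can be started with `python app.py`.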