import os
from transformers import pipeline, AutoTokenizer, AutoModelForSeq2SeqLM
import gradio as gr

# Model name and Hugging Face token
MODEL_NAME = "Pisethan/sangapac-math"
TOKEN = os.getenv("HF_API_TOKEN")

if not TOKEN:
    raise ValueError("Hugging Face API token not found. Set it as an environment variable (HF_API_TOKEN).")
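
# NOTE: the token is read from the environment above. Before launching, set it
# in your shell (e.g. `export HF_API_TOKEN=<your token>`) or, when running as a
# Hugging Face Space, add it as a repository secret named HF_API_TOKEN so it is
# exposed to the app as an environment variable.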

# Load model and tokenizer
try:
    # `token` replaces the deprecated `use_auth_token` argument in recent transformers releases
    tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME, token=TOKEN)
    model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME, token=TOKEN)
    generator = pipeline("text2text-generation", model=model, tokenizer=tokenizer)
except Exception as e:
    generator = None
    print(f"Error loading model or tokenizer: {e}")

def predict(input_text):
    if generator is None:
        return "Model not loaded properly.", {"Error": "Model not loaded properly."}

    try:
        # Generate output
        result = generator(input_text, max_length=256, num_beams=5, early_stopping=True)
        generated_text = result[0]["generated_text"]

        simple_result = f"Generated Solution:\n{generated_text}"
        detailed_result = {
            "Input": input_text,
            "Generated Solution": generated_text,
        }

        return simple_result, detailed_result
    except Exception as e:
        return "An error occurred.", {"Error": str(e)}

# Gradio interface
sample_inputs = [
    ["1 + 1 = ?"],
    ["(5 + 3) × 2 = ?"],
    ["12 ÷ 4 = ?"],
    ["Solve for x: x + 5 = 10"],
]

interface = gr.Interface(
    fn=predict,
    inputs=gr.Textbox(lines=2, placeholder="Enter a math problem..."),
    outputs=[
        gr.Textbox(label="Simple Output"),
        gr.JSON(label="Detailed JSON Output"),
    ],
    title="Sangapac Math Model",
    description=(
        "A model that solves math problems and provides step-by-step solutions. "
        "Examples include Arithmetic, Multiplication, Division, Algebra, and Geometry problems."
    ),
    examples=sample_inputs,
)

interface.launch()
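
# Quick sanity check without the UI (a minimal sketch, assuming the model loaded):
# from a Python shell in this module you can call predict() directly, e.g.
#
#   simple, detailed = predict("12 ÷ 4 = ?")
#   print(simple)
#   print(detailed)
#
# To expose a temporary public URL instead of only the local one, Gradio's
# launch() accepts share=True, i.e. interface.launch(share=True).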