Update app.py
app.py CHANGED
@@ -41,20 +41,27 @@ def generate_response(prompt, max_length=1024):
         {"role": "user", "content": prompt}
     ]
 
-    # Generate response using pipeline
     outputs = pipe(messages, max_new_tokens=max_length)
 
-    #
-    response = outputs[0]["generated_text"]
-
-    # Find the user's prompt in the response and get everything after it
+    # Find assistant's response in the output
    try:
-
-
-
-
+        # The output contains the full conversation history
+        generated_text = outputs[0]["generated_text"]
+        # Look for the last assistant message
+        assistant_prefix = "{'role': 'assistant', 'content': '"
+        assistant_start = generated_text.rfind(assistant_prefix)
+        if assistant_start != -1:
+            # Move past the prefix
+            content_start = assistant_start + len(assistant_prefix)
+            # Find the end of the content (before the closing quote and brace)
+            content_end = generated_text.rfind("'}")
+            if content_end != -1:
+                return generated_text[content_start:content_end]
+    except Exception as e:
+        print(f"Error extracting response: {e}")
 
-    return
+    # Fallback: return the raw generated text if extraction fails
+    return outputs[0]["generated_text"]
 
 # Example with proper line breaks
 example_prompt = """For a scalar field theory with interaction Lagrangian $\mathcal{L}_{int} = g\phi^3 + \lambda\phi^4$:
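The new branch recovers the assistant's reply by string matching on the serialized chat history rather than by indexing into it. Below is a minimal standalone sketch of the same strategy; it assumes outputs[0]["generated_text"] arrives as a stringified list of role/content dicts, and the helper name and sample transcript are illustrative only, not part of app.py.

def extract_assistant_reply(generated_text: str) -> str:
    """Return the last assistant message, falling back to the raw text."""
    assistant_prefix = "{'role': 'assistant', 'content': '"
    start = generated_text.rfind(assistant_prefix)
    if start != -1:
        content_start = start + len(assistant_prefix)
        # The final "'}" in the transcript closes the last (assistant) message
        content_end = generated_text.rfind("'}")
        if content_end != -1:
            return generated_text[content_start:content_end]
    return generated_text  # same fallback behaviour as the patched function

sample = "[{'role': 'user', 'content': 'Hi'}, {'role': 'assistant', 'content': 'Hello!'}]"
print(extract_assistant_reply(sample))  # -> Hello!

If the pipeline instead returns the chat history as an actual list of dicts, which recent transformers releases typically do for chat-style inputs, the structured lookup outputs[0]["generated_text"][-1]["content"] would sidestep repr-quoting edge cases (for example, a reply containing an apostrophe is rendered with double quotes and would not match the prefix above).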
@@ -78,7 +85,7 @@ demo = gr.Interface(
     ],
     outputs=gr.Textbox(label="Response", lines=10),
     title="benhaotang/phi4-qwq-sky-t1",
-    description=f""" To achieve CoT and science reasoning on small scale
+    description=f""" To achieve CoT and science reasoning on small scale with a merge of CoT finetuned phi4 model.
 
 Model: [benhaotang/phi4-qwq-sky-t1]({MODEL_URL})""",
     examples=[
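For context, here is a sketch of the gr.Interface block that the second hunk touches. Only the lines shown in the diff come from this Space; the MODEL_URL value, the input widget, the example entry, the stub response function, and the launch call are assumptions added to make the sketch self-contained.

import gradio as gr

MODEL_URL = "https://huggingface.co/benhaotang/phi4-qwq-sky-t1"  # assumed value

def generate_response(prompt, max_length=1024):
    # Stand-in for the real function patched in the first hunk.
    return f"(model reply to: {prompt})"

demo = gr.Interface(
    fn=generate_response,
    inputs=[gr.Textbox(label="Prompt", lines=5)],  # assumed input widget
    outputs=gr.Textbox(label="Response", lines=10),
    title="benhaotang/phi4-qwq-sky-t1",
    description=f""" To achieve CoT and science reasoning on small scale with a merge of CoT finetuned phi4 model.

Model: [benhaotang/phi4-qwq-sky-t1]({MODEL_URL})""",
    examples=[["Derive the symmetry factor of the one-loop phi^4 diagram."]],  # assumed example
)

if __name__ == "__main__":
    demo.launch()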