import gradio as gr
import requests
def call_api(query: str) -> str:
    """
    Calls the public API with the given query and returns the response text.
    """
    url = "https://adityashriv-refudgee-crisis-deepseek.hf.space/query/"
    params = {"input_text": query}
    headers = {"accept": "application/json"}
    try:
        response = requests.get(url, params=params, headers=headers)
        response.raise_for_status()  # raise an exception for HTTP errors
        json_data = response.json()
        if isinstance(json_data, dict):
            # The API returns its answer under the "response" key.
            result = json_data["response"]
        else:
            result = json_data
    except Exception as e:
        result = f"Error: {e}"
    return result
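# A minimal sketch of how call_api can be exercised on its own. The Space is
# assumed to return JSON shaped like {"response": "..."}; that schema is an
# assumption inferred from the "response" key accessed above.
#
#     print(call_api("What is the current status of the refugee crisis in Europe?"))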
def chat_response(user_message: str, history: list) -> tuple:
    """
    Processes a chat message by sending the query to the API and updating
    the conversation history in OpenAI-style message format.
    """
    reply = call_api(user_message)
    # Append the user's message and the API response as role/content dictionaries.
    history.append({"role": "user", "content": user_message})
    history.append({"role": "assistant", "content": reply})
    return "", history
# Sample questions as single-item lists, used by gr.Examples to populate the input textbox.
sample_questions = [
    ["What are the primary mental health challenges faced by refugee children?"],
    ["What is the current status of the refugee crisis in Europe?"],
    ["Tell me about refugee trends in the Middle East."],
    ["How can technology aid in addressing the mental health needs of refugee children?"],
    ["What policies are in place to address refugee crises?"],
]
with gr.Blocks() as demo:
    gr.Markdown(
        """
        # Refugee Crisis Query Chat
        **Note:** The first query may take some time to respond due to model cold start.
        Enter your query below or select one of the sample questions to get started.
        """
    )
    user_input = gr.Textbox(show_label=False, placeholder="Enter your query here...", lines=1)
    gr.Examples(
        examples=sample_questions,
        inputs=[user_input],
        label="Sample Questions",
    )
    # Specify type="messages" so the Chatbot renders OpenAI-style role/content dicts.
    chatbot = gr.Chatbot(type="messages")
    # The history list held in gr.State is mutated in place by chat_response,
    # so the conversation persists across turns.
    state = gr.State([])
    send_button = gr.Button("Send")
    # Both the Send button and pressing Enter in the textbox submit the query.
    send_button.click(chat_response, inputs=[user_input, state], outputs=[user_input, chatbot])
    user_input.submit(chat_response, inputs=[user_input, state], outputs=[user_input, chatbot])
demo.launch(share=True)