Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,71 +1,71 @@
|
|
1 |
-
import gradio as gr
|
2 |
-
import requests
|
3 |
-
|
4 |
-
def call_api(query: str) -> str:
|
5 |
-
"""
|
6 |
-
Calls the public API with the given query.
|
7 |
-
"""
|
8 |
-
url = "https://adityashriv-refudgee-crisis-deepseek.hf.space/query/"
|
9 |
-
params = {"input_text": query}
|
10 |
-
headers = {"accept": "application/json"}
|
11 |
-
|
12 |
-
try:
|
13 |
-
response = requests.get(url, params=params, headers=headers)
|
14 |
-
response.raise_for_status() # raise exception for HTTP errors
|
15 |
-
json_data = response.json()
|
16 |
-
if isinstance(json_data, dict):
|
17 |
-
result = response["response"]
|
18 |
-
else:
|
19 |
-
result = json_data
|
20 |
-
except Exception as e:
|
21 |
-
result = f"Error: {e}"
|
22 |
-
|
23 |
-
return result
|
24 |
-
|
25 |
-
def chat_response(user_message: str, history: list) -> tuple:
|
26 |
-
"""
|
27 |
-
Processes a chat message by sending the query to the API and updating
|
28 |
-
the conversation history in openai-style format.
|
29 |
-
"""
|
30 |
-
reply = call_api(user_message)
|
31 |
-
# Append the user's message and the API response using dictionary format.
|
32 |
-
history.append({"role": "user", "content": user_message})
|
33 |
-
history.append({"role": "assistant", "content": reply})
|
34 |
-
return "", history
|
35 |
-
|
36 |
-
# Sample questions as lists, which will populate the input textbox.
|
37 |
-
sample_questions = [
|
38 |
-
["How many cases of refugee crises have we had in the US in 1 year?"],
|
39 |
-
["What is the current status of the refugee crisis in Europe?"],
|
40 |
-
["Tell me about refugee trends in the Middle East."],
|
41 |
-
["How does the refugee crisis affect the economy in the US?"],
|
42 |
-
["What policies are in place to address refugee crises?"]
|
43 |
-
]
|
44 |
-
|
45 |
-
with gr.Blocks() as demo:
|
46 |
-
gr.Markdown(
|
47 |
-
"""
|
48 |
-
# Refugee Crisis Query Chat
|
49 |
-
|
50 |
-
**Note:** The first query may take some time to respond due to model cold start.
|
51 |
-
|
52 |
-
Enter your query below or select one of the sample questions to get started.
|
53 |
-
"""
|
54 |
-
)
|
55 |
-
|
56 |
-
user_input = gr.Textbox(show_label=False, placeholder="Enter your query here...", lines=1)
|
57 |
-
|
58 |
-
gr.Examples(
|
59 |
-
examples=sample_questions,
|
60 |
-
inputs=[user_input],
|
61 |
-
label="Sample Questions"
|
62 |
-
)
|
63 |
-
|
64 |
-
# Specify type="messages" for the Chatbot component.
|
65 |
-
chatbot = gr.Chatbot(type="messages")
|
66 |
-
state = gr.State([])
|
67 |
-
send_button = gr.Button("Send")
|
68 |
-
|
69 |
-
send_button.click(chat_response, inputs=[user_input, state], outputs=[user_input, chatbot])
|
70 |
-
|
71 |
-
demo.launch()
|
|
|
1 |
+
import gradio as gr
|
2 |
+
import requests
|
3 |
+
|
4 |
+
def call_api(query: str) -> str:
    """
    Call the public Refugee Crisis query API with the given query.

    Args:
        query: Free-text question, sent as the ``input_text`` query parameter.

    Returns:
        The ``"response"`` field of the JSON payload when the payload is a
        dict, the raw decoded JSON otherwise, or an ``"Error: ..."`` string
        if the request or decoding fails.
    """
    url = "https://adityashriv-refudgee-crisis-deepseek.hf.space/query/"
    params = {"input_text": query}
    headers = {"accept": "application/json"}

    try:
        # Timeout keeps a cold-starting Space from blocking the UI forever.
        response = requests.get(url, params=params, headers=headers, timeout=120)
        response.raise_for_status()  # raise exception for HTTP errors
        json_data = response.json()
        if isinstance(json_data, dict):
            # BUG FIX: the original subscripted the Response object
            # (`response["response"]`), which raises TypeError and was
            # silently converted into an "Error: ..." string by the
            # except clause below. Index the decoded JSON instead.
            result = json_data["response"]
        else:
            result = json_data
    except Exception as e:
        # Broad catch is deliberate: any failure (network, HTTP status,
        # bad JSON, missing key) is surfaced to the chat as text rather
        # than crashing the UI callback.
        result = f"Error: {e}"

    return result
|
24 |
+
|
25 |
+
def chat_response(user_message: str, history: list) -> tuple:
    """
    Handle one chat turn: forward the user's message to the API and
    extend the conversation history in OpenAI-style message format.

    Args:
        user_message: Text the user typed into the input box.
        history: Running list of ``{"role", "content"}`` dicts; mutated
            in place.

    Returns:
        A 2-tuple of ``("", history)`` — the empty string clears the
        input textbox, the list re-renders the Chatbot component.
    """
    reply = call_api(user_message)
    # Record both sides of the exchange as role/content dicts, matching
    # the Chatbot component's type="messages" contract.
    history.extend(
        [
            {"role": "user", "content": user_message},
            {"role": "assistant", "content": reply},
        ]
    )
    return "", history
|
35 |
+
|
36 |
+
# Sample questions wrapped as single-element lists, the shape gr.Examples
# needs to populate the single input textbox.
sample_questions = [
    [question]
    for question in (
        "How many cases of refugee crises have we had in the US in 1 year?",
        "What is the current status of the refugee crisis in Europe?",
        "Tell me about refugee trends in the Middle East.",
        "How does the refugee crisis affect the economy in the US?",
        "What policies are in place to address refugee crises?",
    )
]
|
44 |
+
|
45 |
+
# --- Gradio UI -------------------------------------------------------------
# Built at import time; demo.launch(share=True) starts the server and
# requests a public share link.
with gr.Blocks() as demo:
    gr.Markdown(
        """
        # Refugee Crisis Query Chat

        **Note:** The first query may take some time to respond due to model cold start.

        Enter your query below or select one of the sample questions to get started.
        """
    )

    # Unlabeled single-line textbox that holds the user's query.
    user_input = gr.Textbox(show_label=False, placeholder="Enter your query here...", lines=1)

    # Clicking an example copies its text into the textbox above.
    gr.Examples(
        examples=sample_questions,
        inputs=[user_input],
        label="Sample Questions"
    )

    # Specify type="messages" for the Chatbot component.
    chatbot = gr.Chatbot(type="messages")
    # Shared mutable history list fed into chat_response on every click.
    state = gr.State([])
    send_button = gr.Button("Send")

    # chat_response returns ("", history): the "" clears the textbox, the
    # history list re-renders the chatbot.
    send_button.click(chat_response, inputs=[user_input, state], outputs=[user_input, chatbot])

demo.launch(share=True)
|