Update app.py
app.py CHANGED

@@ -10,7 +10,7 @@ from langchain.agents.openai_assistant import OpenAIAssistantRunnable
 from langchain.schema import HumanMessage, AIMessage

 api_key = os.getenv('OPENAI_API_KEY')
-extractor_agent = os.getenv('
+extractor_agent = os.getenv('ASSISTANT_ID_SOLUTION_SPECIFIER_B')

 # Create the assistant. By default, we don't specify a thread_id,
 # so the first call that doesn't pass one will create a new thread.

@@ -61,7 +61,7 @@ def predict(message, history):
 # Create a Gradio ChatInterface using our predict function
 chat = gr.ChatInterface(
     fn=predict,
-    title="Solution Specifier
+    title="Solution Specifier B",
     #description="Testing threaded conversation"
 )
 chat.launch(share=True)
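For context, here is a minimal sketch of how the pieces visible in this diff might fit together end to end. Only the lines shown in the hunks above come from the Space's app.py; the predict() body, the thread bookkeeping, and the message parsing below are illustrative assumptions, not the Space's actual implementation.

import os

import gradio as gr
from langchain.agents.openai_assistant import OpenAIAssistantRunnable
from langchain.schema import HumanMessage, AIMessage

# The OpenAI client also reads OPENAI_API_KEY from the environment.
api_key = os.getenv('OPENAI_API_KEY')
extractor_agent = os.getenv('ASSISTANT_ID_SOLUTION_SPECIFIER_B')

# Create the assistant. By default, we don't specify a thread_id,
# so the first call that doesn't pass one will create a new thread.
assistant = OpenAIAssistantRunnable(assistant_id=extractor_agent)

thread_id = None  # assumption: reuse the thread created by the first call


def predict(message, history):
    # Assumed implementation: send the user message to the assistant and
    # keep every turn on one OpenAI thread so the conversation has memory.
    global thread_id
    payload = {"content": message}
    if thread_id is not None:
        payload["thread_id"] = thread_id
    messages = assistant.invoke(payload)      # list of OpenAI thread messages
    thread_id = messages[0].thread_id         # remember the thread for follow-up turns
    return messages[0].content[0].text.value  # assistant's text reply


# Create a Gradio ChatInterface using our predict function
chat = gr.ChatInterface(
    fn=predict,
    title="Solution Specifier B",
    #description="Testing threaded conversation"
)
chat.launch(share=True)

The HumanMessage/AIMessage import appears in the file's hunk context but is not needed for this sketch; it is kept only to mirror the imports shown in the diff.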