Update app.py
app.py CHANGED
@@ -54,7 +54,7 @@ LOCAL_APP_URL = "https://huggingface.co/spaces/awacke1/AzureCosmosDBUI"
 CosmosDBUrl = 'https://portal.azure.com/#@AaronCWackergmail.onmicrosoft.com/resource/subscriptions/003fba60-5b3f-48f4-ab36-3ed11bc40816/resourceGroups/datasets/providers/Microsoft.DocumentDB/databaseAccounts/acae-afd/dataExplorer'
 
 # 🤖 Anthropic configuration - Teaching machines to be more human (and funnier)
-
+anthropicclient = anthropic.Anthropic(api_key=os.environ.get("ANTHROPIC_API_KEY"))
 
 # 🧠 Initialize session state - Because even apps need a good memory
 if "chat_history" not in st.session_state:
@@ -315,18 +315,18 @@ def archive_current_container(database_name, container_name, client):
 # 🔍 Search glossary - Finding needles in digital haystacks
 def search_glossary(query):
     st.markdown(f"### 🔍 SearchGlossary for: {query}")
-    # Dropdown for model selection
     model_options = ['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None']
-
-    # Dropdown for database selection
+    model_choice = st.selectbox('🧠 Select LLM Model', options=model_options, index=1, key=f"model_choice_{id(query)}")
     database_options = ['Semantic Search', 'Arxiv Search - Latest - (EXPERIMENTAL)']
-
+    database_choice = st.selectbox('📚 Select Database', options=database_options, index=0, key=f"database_choice_{id(query)}")
+
     # 🕵️‍♂️ Searching the glossary for: query
     all_results = ""
-    #
-
+    # Limit the query display to 80 characters
+    display_query = query[:80] + "..." if len(query) > 80 else query
+    st.markdown(f"🕵️‍♂️ Running ArXiV AI Analysis with Query: {display_query} - ML model: {model_choice} and Option: {database_options}")
 
-    # 🔍
+    # 🔍 ArXiV RAG researcher expert ~-<>-~ Paper Summary & Ask LLM
     client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
     # 🔍 ArXiv RAG researcher expert ~-<>-~ Paper Summary & Ask LLM - api_name: /ask_llm
     result = client.predict(
@@ -351,11 +351,6 @@ def search_glossary(query):
     st.markdown(result2)
     #st.code(result2, language="python", line_numbers=True)
 
-
-
-
-
-
     # 🔍 ArXiv RAG researcher expert ~-<>-~ Paper Summary & Ask LLM - api_name: /update_with_rag_md
     response2 = client.predict(
         message=query, # str in 'parameter_13' Textbox component