Spaces:
Sleeping
Sleeping
Kieran Gookey
committed on
Commit
·
16dcc46
1
Parent(s):
242bba0
Added query
Browse files
app.py
CHANGED
@@ -16,10 +16,13 @@ embed_model_name = st.text_input(
|
|
16 |
llm_model_name = st.text_input(
|
17 |
'Embed Model name', "mistralai/Mistral-7B-Instruct-v0.2")
|
18 |
|
|
|
|
|
|
|
19 |
html_file = st.file_uploader("Upload a html file", type=["html"])
|
20 |
|
21 |
if st.button('Start Pipeline'):
|
22 |
-
if html_file is not None and embed_model_name is not None and llm_model_name is not None:
|
23 |
st.write('Running Pipeline')
|
24 |
llm = HuggingFaceInferenceAPI(
|
25 |
model_name=llm_model_name, token=inference_api_key)
|
@@ -54,7 +57,7 @@ if st.button('Start Pipeline'):
|
|
54 |
retriever = index.as_retriever()
|
55 |
|
56 |
ranked_nodes = retriever.retrieve(
|
57 |
-
|
58 |
|
59 |
with st.expander("Ranked Nodes"):
|
60 |
for node in ranked_nodes:
|
@@ -63,8 +66,7 @@ if st.button('Start Pipeline'):
|
|
63 |
query_engine = index.as_query_engine(
|
64 |
filters=filters, service_context=service_context)
|
65 |
|
66 |
-
response = query_engine.query(
|
67 |
-
"Get me all the information about the product")
|
68 |
|
69 |
st.write(response)
|
70 |
|
|
|
16 |
llm_model_name = st.text_input(
|
17 |
'Embed Model name', "mistralai/Mistral-7B-Instruct-v0.2")
|
18 |
|
19 |
+
query = st.text_input(
|
20 |
+
'Query', "What is the price of the product?")
|
21 |
+
|
22 |
html_file = st.file_uploader("Upload a html file", type=["html"])
|
23 |
|
24 |
if st.button('Start Pipeline'):
|
25 |
+
if html_file is not None and embed_model_name is not None and llm_model_name is not None and query is not None:
|
26 |
st.write('Running Pipeline')
|
27 |
llm = HuggingFaceInferenceAPI(
|
28 |
model_name=llm_model_name, token=inference_api_key)
|
|
|
57 |
retriever = index.as_retriever()
|
58 |
|
59 |
ranked_nodes = retriever.retrieve(
|
60 |
+
query)
|
61 |
|
62 |
with st.expander("Ranked Nodes"):
|
63 |
for node in ranked_nodes:
|
|
|
66 |
query_engine = index.as_query_engine(
|
67 |
filters=filters, service_context=service_context)
|
68 |
|
69 |
+
response = query_engine.query(query)
|
|
|
70 |
|
71 |
st.write(response)
|
72 |
|