Upload utils.py
utils.py
ADDED
@@ -0,0 +1,35 @@
from sentence_transformers import SentenceTransformer
import pinecone
import openai
import streamlit as st

openai.api_key = "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"

# Sentence-transformer model used to embed queries before searching Pinecone.
model = SentenceTransformer('all-MiniLM-L6-v2')

pinecone.init(api_key='6f66d7f3-7478-4d25-9789-78cfef84ab52', environment='asia-southeast1-gcp-free')
index = pinecone.Index('langchain-chatbot')


def find_match(input):
    # Embed the user query and return the text of the two most similar chunks in the index.
    input_em = model.encode(input).tolist()
    result = index.query(input_em, top_k=2, includeMetadata=True)
    return result['matches'][0]['metadata']['text'] + "\n" + result['matches'][1]['metadata']['text']


def query_refiner(conversation, query):
    # Ask the completion model to rewrite the raw query into a standalone,
    # knowledge-base-friendly question, given the conversation so far.
    response = openai.Completion.create(
        model="text-davinci-003",
        prompt=f"Given the following user query and conversation log, formulate a question that would be the most relevant to provide the user with an answer from a knowledge base.\n\nCONVERSATION LOG: \n{conversation}\n\nQuery: {query}\n\nRefined Query:",
        temperature=0.7,
        max_tokens=256,
        top_p=1,
        frequency_penalty=0,
        presence_penalty=0
    )
    return response['choices'][0]['text']


def get_conversation_string():
    # Rebuild the conversation log from Streamlit session state.
    # responses[0] is the bot's greeting, so request i pairs with response i+1.
    conversation_string = ""
    for i in range(len(st.session_state['responses']) - 1):
        conversation_string += "Human: " + st.session_state['requests'][i] + "\n"
        conversation_string += "Bot: " + st.session_state['responses'][i+1] + "\n"
    return conversation_string
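For context, here is a minimal sketch of how these helpers might be wired into the Space's Streamlit chat loop. The file name main.py, the widget layout, and the placeholder answer are assumptions for illustration, not part of this upload; only the session-state keys ('requests', 'responses') are taken from get_conversation_string() above.

# main.py (hypothetical) -- minimal chat loop using the helpers from utils.py.
import streamlit as st
from utils import find_match, query_refiner, get_conversation_string

# Session-state lists expected by get_conversation_string().
if 'responses' not in st.session_state:
    st.session_state['responses'] = ["How can I assist you?"]
if 'requests' not in st.session_state:
    st.session_state['requests'] = []

query = st.text_input("Query:", key="input")
if query:
    conversation = get_conversation_string()
    refined = query_refiner(conversation, query)  # rewrite query using chat history
    context = find_match(refined)                 # retrieve matching chunks from Pinecone
    # In the full app, `context` and `query` would be passed to an LLM chain
    # (e.g. a LangChain conversation chain) to generate the answer; a placeholder is used here.
    answer = f"(answer would be generated from this context)\n{context}"
    st.session_state['requests'].append(query)
    st.session_state['responses'].append(answer)

# Render the conversation so far.
for i, response in enumerate(st.session_state['responses']):
    st.write("Bot: " + response)
    if i < len(st.session_state['requests']):
        st.write("Human: " + st.session_state['requests'][i])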