|
import os

# SECURITY: never hard-code API keys in source — the previous revision embedded
# a live Anthropic key here, which must be considered leaked and rotated.
# Read the key from the environment instead and fail fast with a clear message.
if not os.environ.get("ANTHROPIC_API_KEY"):
    raise RuntimeError(
        "ANTHROPIC_API_KEY environment variable is not set; "
        "export it before launching this app."
    )
|
import gradio as gr

# NOTE: ConversationBufferMemory and RetrievalQA live in the core `langchain`
# package, not `langchain_community` — the old paths raised ModuleNotFoundError.
from langchain.chains import RetrievalQA
from langchain.memory import ConversationBufferMemory
from langchain_community.chat_models import ChatAnthropic
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import Chroma
|
|
|
# Embedding model for retrieval. The Jina v2 weights ship custom model code,
# so trust_remote_code must be enabled for HuggingFace to load them.
embeddings = HuggingFaceEmbeddings(
    model_name="jinaai/jina-embeddings-v2-base-en",
    model_kwargs={'trust_remote_code': True},
)

# Claude 2 chat model at temperature 0 for deterministic, grounded answers.
llm = ChatAnthropic(model='claude-2', temperature=0)
|
|
|
|
|
# Location of the persisted Chroma vector store (built offline from the
# Electric Machinery documents).
persist_directory = os.path.expanduser('~/Electric_Machinerydb')

chroma_db = Chroma(persist_directory=persist_directory, embedding_function=embeddings)

# Top-3 similarity search. `return_source_documents` is NOT a retriever option
# (it is a RetrievalQA chain option, set where the chain is built) — passing it
# here fails pydantic validation on current langchain versions, so it is removed.
# The redundant `embeddings = embeddings` self-assignment was also dropped.
retriever = chroma_db.as_retriever(search_kwargs={"k": 3})

# Conversation memory keyed to the RetrievalQA chain's input/output fields so
# the chain knows which values to store per turn.
memory = ConversationBufferMemory(
    memory_key="chat_history",
    return_messages=True,
    input_key="query",
    output_key="result",
)
|
|
|
|
|
# "stuff" chain type: all retrieved chunks are stuffed into a single prompt.
# return_source_documents=True makes the chain expose the retrieved docs under
# result['source_documents'] so the UI can show provenance.
_chain_config = {
    "llm": llm,
    "chain_type": "stuff",
    "retriever": retriever,
    "return_source_documents": True,
    "memory": memory,
    "output_key": "result",
}
qa_chain = RetrievalQA.from_chain_type(**_chain_config)
|
|
|
|
|
def _format_sources(result):
    """Render the chain result's retrieved documents as display text.

    One "Page / Source / Content" section per document, separated by `---`.
    Uses ``metadata.get`` because non-PDF loaders may omit 'page'/'source'.
    """
    parts = []
    for doc in result.get('source_documents', []):
        parts.append(f"Page: {doc.metadata.get('page', 'N/A')}\n")
        parts.append(f"Source: {doc.metadata.get('source', 'unknown')}\n")
        parts.append(f"Content: {doc.page_content}\n")
        parts.append("---\n")
    return "".join(parts)


def ask_question(question, chat_history):
    """Gradio handler: answer *question* and append the turn to *chat_history*.

    Returns (cleared_textbox, updated_history, metadata_text). The source
    metadata is formatted from the SAME chain result as the answer — the
    previous implementation re-ran the whole QA chain via show_metadata(),
    invoking the LLM twice per question and writing a duplicate turn into
    the conversation memory.
    """
    query = question.strip()
    if not query:
        # Empty input: prompt the user, show no metadata.
        chat_history.append(("", "Please enter a question."))
        return "", chat_history, ""
    result = qa_chain({"query": query})
    answer = result['result']
    chat_history.append((query, answer))
    return "", chat_history, _format_sources(result)
|
|
|
def show_metadata(chat_history):
    """Return formatted source metadata for the most recent chat turn.

    Fetches the supporting documents with the retriever directly instead of
    re-running the full QA chain as before — the old approach invoked the LLM
    a second time per question and appended a duplicate turn to the
    conversation memory.
    """
    if not chat_history:
        return ""
    query, _answer = chat_history[-1]
    if not query:
        # Last entry is the "Please enter a question." sentinel — nothing to show.
        return ""
    parts = []
    for doc in retriever.get_relevant_documents(query):
        # .get() guards sources whose loaders don't set 'page'/'source' metadata.
        parts.append(f"Page: {doc.metadata.get('page', 'N/A')}\n")
        parts.append(f"Source: {doc.metadata.get('source', 'unknown')}\n")
        parts.append(f"Content: {doc.page_content}\n")
        parts.append("---\n")
    return "".join(parts)
|
|
|
def clean_history():
    """Gradio handler: wipe the chain's conversation memory and blank the UI.

    Returns fresh values for (chatbot, question textbox, metadata textbox).
    """
    memory.clear()
    cleared_chat = []
    cleared_text = ""
    return cleared_chat, cleared_text, cleared_text
|
|
|
# --- Gradio UI ---------------------------------------------------------------
with gr.Blocks() as demo:
    gr.Markdown("# Electric Machinery QA by Tamil")

    with gr.Row():
        # Left column: question entry, action buttons, and the chat transcript.
        with gr.Column(scale=2):
            question_box = gr.Textbox(label="Question")
            ask_button = gr.Button("Ask")
            clean_button = gr.Button("Clean")
            chat_display = gr.Chatbot(label="Conversation")

        # Right column: provenance of the latest answer.
        with gr.Column(scale=1):
            gr.Markdown("## Metadata")
            metadata_box = gr.Textbox(label="Source Information", lines=10)

    # Wire the handlers: Ask answers and refreshes metadata; Clean resets all.
    ask_button.click(
        ask_question,
        inputs=[question_box, chat_display],
        outputs=[question_box, chat_display, metadata_box],
    )
    clean_button.click(
        clean_history,
        inputs=[],
        outputs=[chat_display, question_box, metadata_box],
    )

demo.launch()