#!/usr/bin/env python3
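# endoai — question-answering Gradio app: queries a prebuilt llama_index
# vector index through a LangChain zero-shot agent.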
import os
import gradio as gr
from dotenv import load_dotenv
# OPENAI_API_KEY, AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are read from the env file.
load_dotenv('myenvfile.env')
from llama_index import GPTSimpleVectorIndex
from langchain.agents import initialize_agent, Tool
from langchain.llms import OpenAI
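# Load the prebuilt vector index from local disk.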
index = GPTSimpleVectorIndex.load_from_disk('index.json')
def querying_db(query: str):
    response = index.query(query)
    return response
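# Expose the index query as a LangChain tool the agent can call.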
tools = [
    Tool(
        name="QueryingDB",
        func=querying_db,
        description="Takes a query string and returns the most relevant answer from the documentation",
    )
]
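# Deterministic LLM (temperature 0) for the agent's reasoning.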
llm = OpenAI(temperature=0)
def get_answer(query_string):
    agent = initialize_agent(tools, llm, agent="zero-shot-react-description")
    result = agent.run(query_string)
    return result
def qa_app(query):
    answer = get_answer(query)
    return answer
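# Gradio UI: one textbox for the question, one for the agent's answer.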
inputs = gr.inputs.Textbox(label="Enter your question:")
output = gr.outputs.Textbox(label="Answer:")
gr.Interface(fn=qa_app, inputs=inputs, outputs=output, title="Query Answering App").launch()