#!/usr/bin/env python3

import gradio as gr
from dotenv import load_dotenv
from langchain.agents import initialize_agent, Tool
from langchain.llms import OpenAI
from llama_index import GPTSimpleVectorIndex

# Load credentials from the local env file instead of hard-coding them here.
# myenvfile.env is expected to define OPENAI_API_KEY (and, if S3 access is
# needed, AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY).
load_dotenv('myenvfile.env')

# Load the pre-built llama_index vector index from disk.
index = GPTSimpleVectorIndex.load_from_disk('index.json')


def querying_db(query: str):
    """Query the local vector index and return its response."""
    response = index.query(query)
    return response


tools = [
    Tool(
        name="QueryingDB",
        func=querying_db,
        description=(
            "Takes a query string as input and returns the most relevant "
            "answer from the indexed documentation."
        ),
    )
]

llm = OpenAI(temperature=0)

# Build the agent once at startup instead of re-initializing it on every call.
agent = initialize_agent(tools, llm, agent="zero-shot-react-description")


def get_answer(query_string):
    """Run a question through the zero-shot agent and return its answer."""
    result = agent.run(query_string)
    return result


def qa_app(query):
    """Gradio callback: answer the user's question via the agent."""
    answer = get_answer(query)
    return answer


# Minimal Gradio UI: one question box in, one answer box out.
inputs = gr.Textbox(label="Enter your question:")
output = gr.Textbox(label="Answer:")
gr.Interface(fn=qa_app, inputs=inputs, outputs=output, title="Query Answering App").launch()