dioarafl committed on
Commit
ead8556
·
verified ·
1 Parent(s): 5bd1bac

Update app.py

Browse files
Files changed (1)
  1. app.py +21 -28
app.py CHANGED
@@ -1,23 +1,7 @@
1
- import requests
2
- from llama_index import SimpleDirectoryReader, GPTSimpleVectorIndex, LLMPredictor, ServiceContext, PromptHelper
3
- import gradio as gr
4
-
5
  import os
6
- from dotenv import load_dotenv
7
-
8
- # Load environment variables from .env file
9
- load_dotenv()
10
-
11
- # Get Hugging Face API key from environment variable
12
- hf_api_key = os.getenv("HF_API_KEY")
13
-
14
- API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.3"
15
- headers = {"Authorization": f"Bearer {hf_api_key}"}
16
-
17
-
18
- def query(payload):
19
- response = requests.post(API_URL, headers=headers, json=payload)
20
- return response.json()
21
 
22
  def init_index(directory_path):
23
  # Model parameters
@@ -26,14 +10,14 @@ def init_index(directory_path):
26
  max_chunk_overlap = 20
27
  chunk_size_limit = 600
28
 
29
- # Prompt helper and predictor
30
  prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit)
31
- llm_predictor = LLMPredictor(llm=query)
32
 
33
- # Read documents from the "docs" folder
34
  documents = SimpleDirectoryReader(directory_path).load_data()
35
 
36
- # Initialize index with document data
37
  service_context = ServiceContext.from_defaults(llm_predictor=llm_predictor, prompt_helper=prompt_helper)
38
  index = GPTSimpleVectorIndex.from_documents(documents, service_context=service_context)
39
 
@@ -51,14 +35,23 @@ def chatbot(input_text):
51
 
52
  return response.response
53
 
54
- # Create index
55
- init_index("docs")
 
56
 
57
- # Create UI interface to interact with the Hugging Face model
58
  iface = gr.Interface(fn=chatbot,
59
  inputs=gr.components.Textbox(lines=7, placeholder="Enter your question here"),
60
  outputs="text",
61
- title="Frost AI ChatBot: Your Knowledge Companion Powered by Hugging Face",
62
- description="Ask any question about rahasak research papers",
63
  allow_screenshot=True)
 
 
 
 
 
 
 
 
64
  iface.launch(share=True)
 
 
 
 
 
1
  import os
2
+ import gradio as gr
3
+ from llama_index import SimpleDirectoryReader, GPTSimpleVectorIndex, LLMPredictor, ServiceContext, PromptHelper
4
+ from langchain.chat_models import ChatOpenAI
 
 
 
 
 
 
 
 
 
 
 
 
5
 
6
  def init_index(directory_path):
7
  # Model parameters
 
10
  max_chunk_overlap = 20
11
  chunk_size_limit = 600
12
 
13
+ # Initialize LLM predictor with LangChain ChatOpenAI model
14
  prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit)
15
+ llm_predictor = LLMPredictor(llm=ChatOpenAI(temperature=0.7, model_name="gpt-3.5-turbo", max_tokens=num_outputs))
16
 
17
+ # Read documents from specified directory
18
  documents = SimpleDirectoryReader(directory_path).load_data()
19
 
20
+ # Initialize index with documents data
21
  service_context = ServiceContext.from_defaults(llm_predictor=llm_predictor, prompt_helper=prompt_helper)
22
  index = GPTSimpleVectorIndex.from_documents(documents, service_context=service_context)
23
 
 
35
 
36
  return response.response
37
 
38
+ # Function to input OpenAI API key
39
+ def get_api_key():
40
+ os.environ["OPENAI_API_KEY"] = input("Please enter your OpenAI API key: ")
41
 
42
+ # Create UI interface to interact with GPT-3 model
43
  iface = gr.Interface(fn=chatbot,
44
  inputs=gr.components.Textbox(lines=7, placeholder="Enter your question here"),
45
  outputs="text",
46
+ title="Frost AI ChatBot: Your Knowledge Companion Powered by ChatGPT",
47
+ description="Ask any question about research papers",
48
  allow_screenshot=True)
49
+
50
+ # Add API key input to interface
51
+ iface.add_input("textbox", label="OpenAI API Key", type="text", default=get_api_key())
52
+
53
+ # Initialize index
54
+ init_index("docs")
55
+
56
+ # Launch the interface
57
  iface.launch(share=True)