UPDATE GEMINI FOR FAST INF
app.py
CHANGED
@@ -1,15 +1,123 @@
+# import gradio as gr
+# from langchain.prompts import PromptTemplate
+# from langchain_community.llms import CTransformers
+# from langchain_community.vectorstores import Pinecone as LangchainPinecone
+# from langchain.chains import RetrievalQA
+# from pinecone import Pinecone
+# from dotenv import load_dotenv
+# import os
+
+# # Load environment variables
+# load_dotenv()
+# PINECONE_API_KEY = os.getenv('PINECONE_API_KEY')
+# index_name = "apple-chatbot"
+
+# class AppleChatbot:
+#     def __init__(self, k=2, max_tokens=512, temperature=0.8):
+#         self.k = k
+#         self.max_tokens = max_tokens
+#         self.temperature = temperature
+#         self.qa_chain = self.initialize_chatbot()
+
+#     def download_hf_embeddings(self):
+#         from langchain_community.embeddings import HuggingFaceEmbeddings
+#         return HuggingFaceEmbeddings()
+
+#     def initialize_chatbot(self):
+#         embeddings = self.download_hf_embeddings()
+#         model_path = "TheBloke/Llama-2-7B-Chat-GGML"
+#         llm = CTransformers(
+#             model=model_path,
+#             model_type="llama",
+#             config={
+#                 'max_new_tokens': self.max_tokens,
+#                 'temperature': self.temperature
+#             }
+#         )
+
+#         # Initialize pinecone
+#         pc = Pinecone(api_key=PINECONE_API_KEY)
+#         index = pc.Index(index_name)
+
+#         # Use the same prompt template from your original application
+#         prompt_template = """
+#         You are an expert in apple cultivation and orchard management. Use the following pieces of context to answer the question at the end.
+#         If you don't know the answer, just say that you don't know, don't try to make up an answer.
+#         {context}
+#         Question: {question}
+#         Answer:"""
+#         PROMPT = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
+#         chain_type_kwargs = {"prompt": PROMPT}
+
+#         docsearch = LangchainPinecone(index, embeddings.embed_query, "text")
+#         qa = RetrievalQA.from_chain_type(
+#             llm=llm,
+#             chain_type="stuff",
+#             retriever=docsearch.as_retriever(search_kwargs={'k': self.k}),
+#             return_source_documents=True,
+#             chain_type_kwargs=chain_type_kwargs
+#         )
+#         return qa
+
+#     def get_response(self, question):
+#         try:
+#             result = self.qa_chain({"query": question})
+#             return result["result"]
+#         except Exception as e:
+#             return f"Error: {str(e)}"
+
+# # Initialize the chatbot
+# chatbot = AppleChatbot()
+
+# # Define the Gradio interface
+# def respond(message, history):
+#     response = chatbot.get_response(message)
+#     return response
+
+# # Create the Gradio interface
+# demo = gr.ChatInterface(
+#     respond,
+#     chatbot=gr.Chatbot(height=600),
+#     textbox=gr.Textbox(placeholder="Ask me anything about apple cultivation...", container=False),
+#     title="Apple Orchard Expert Chatbot",
+#     description="Ask questions about apple cultivation and orchard management. Built with Langchain, Pinecone, and Llama-2.",
+#     theme=gr.themes.Soft(),
+#     examples=[
+#         "What are the ideal conditions for growing apples?",
+#         "How do I prevent common apple diseases?",
+#         "What is the best time to harvest apples?",
+#     ],
+#     cache_examples=False,
+# )
+
+# # Launch the interface
+# if __name__ == "__main__":
+#     demo.queue()  # Enable queuing
+#     demo.launch(
+#         server_name="0.0.0.0",
+#         server_port=7860,
+#         share=True
+#     )
+
+
 import gradio as gr
 from langchain.prompts import PromptTemplate
-from langchain_community.llms import CTransformers
 from langchain_community.vectorstores import Pinecone as LangchainPinecone
 from langchain.chains import RetrievalQA
 from pinecone import Pinecone
 from dotenv import load_dotenv
 import os
+import google.generativeai as genai
+import logging
+
+# Configure logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)

 # Load environment variables
 load_dotenv()
 PINECONE_API_KEY = os.getenv('PINECONE_API_KEY')
+GEMINI_API_KEY = os.getenv('GEMINI_API_KEY')
 index_name = "apple-chatbot"

 class AppleChatbot:
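This first hunk comments out the old CTransformers/Llama-2 application wholesale and adds google.generativeai, logging, and a GEMINI_API_KEY environment variable. As a standalone sanity check of the new dependency, a minimal sketch of the SDK calls the commit relies on (the prompt string here is illustrative, not from app.py):

import os

import google.generativeai as genai
from dotenv import load_dotenv

# Load GEMINI_API_KEY from .env, mirroring the app's load_dotenv() setup.
load_dotenv()
genai.configure(api_key=os.getenv("GEMINI_API_KEY"))

# 'gemini-pro' matches the model name used later in initialize_chatbot().
model = genai.GenerativeModel("gemini-pro")
response = model.generate_content("In one sentence, what climate suits apple trees?")
print(response.text)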
@@ -25,17 +133,12 @@ class AppleChatbot:

     def initialize_chatbot(self):
         embeddings = self.download_hf_embeddings()
-        model_path = "TheBloke/Llama-2-7B-Chat-GGML"
-        llm = CTransformers(
-            model=model_path,
-            model_type="llama",
-            config={
-                'max_new_tokens': self.max_tokens,
-                'temperature': self.temperature
-            }
-        )

-        # Initialize pinecone
+        # Initialize Gemini
+        genai.configure(api_key=GEMINI_API_KEY)
+        llm = genai.GenerativeModel('gemini-pro')
+
+        # Initialize Pinecone
         pc = Pinecone(api_key=PINECONE_API_KEY)
         index = pc.Index(index_name)

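This hunk only shows the Gemini and Pinecone initialization; the rest of initialize_chatbot (the prompt template and the RetrievalQA chain) lies outside the visible diff. Note that a raw genai.GenerativeModel is not a LangChain LLM, so it cannot be passed directly as llm= to RetrievalQA.from_chain_type. A minimal sketch of one way to keep the existing chain, assuming the langchain_google_genai wrapper is acceptable (the diff does not confirm this choice):

import os

from dotenv import load_dotenv
from langchain.chains import RetrievalQA
from langchain.prompts import PromptTemplate
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import Pinecone as LangchainPinecone
from langchain_google_genai import ChatGoogleGenerativeAI
from pinecone import Pinecone

load_dotenv()

# Pinecone index and embeddings, as in the existing app.
pc = Pinecone(api_key=os.getenv("PINECONE_API_KEY"))
index = pc.Index("apple-chatbot")
embeddings = HuggingFaceEmbeddings()
docsearch = LangchainPinecone(index, embeddings.embed_query, "text")

# Same prompt as the original application, condensed into one template string.
prompt = PromptTemplate(
    template=(
        "You are an expert in apple cultivation and orchard management. "
        "Use the following pieces of context to answer the question at the end.\n"
        "If you don't know the answer, just say that you don't know.\n"
        "{context}\nQuestion: {question}\nAnswer:"
    ),
    input_variables=["context", "question"],
)

# LangChain-compatible Gemini chat model (assumption: langchain_google_genai is installed).
llm = ChatGoogleGenerativeAI(model="gemini-pro", google_api_key=os.getenv("GEMINI_API_KEY"))

qa = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type="stuff",
    retriever=docsearch.as_retriever(search_kwargs={"k": 2}),
    return_source_documents=True,
    chain_type_kwargs={"prompt": prompt},
)
print(qa({"query": "What soil pH suits apple orchards?"})["result"])

Alternatively, the retrieved documents could be formatted into a prompt and sent through model.generate_content() directly, bypassing RetrievalQA; the visible hunks do not show which route the app takes.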
@@ -80,7 +183,7 @@ demo = gr.ChatInterface(
     chatbot=gr.Chatbot(height=600),
     textbox=gr.Textbox(placeholder="Ask me anything about apple cultivation...", container=False),
     title="Apple Orchard Expert Chatbot",
-    description="Ask questions about apple cultivation and orchard management. Built with Langchain, Pinecone, and Llama-2.",
+    description="Ask questions about apple cultivation and orchard management. Built with Langchain, Pinecone, and Gemini.",
     theme=gr.themes.Soft(),
     examples=[
         "What are the ideal conditions for growing apples?",