momondi committed on
Commit
eab08c4
·
verified ·
1 Parent(s): c858739

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +65 -71
app.py CHANGED
@@ -1,99 +1,93 @@
1
- import gradio as gr
2
-
3
- def greet(name):
4
- return f"Hello {name}!"
5
 
6
- iface = gr.Interface(fn=greet, inputs="text", outputs="text")
7
- iface.launch()
8
- # import pandas as pd
9
 
10
 
11
- # df = pd.read_json("./tourisme_chatbot.json")
12
 
13
- # context_data = []
14
- # for i in range(len(df)):
15
- # context = ""
16
- # for j in range(4):
17
- # context += df.columns[j]
18
- # context += ": "
19
- # context += df.iloc[i][j]
20
- # context += " "
21
- # context_data.append(context)
22
 
23
 
24
- # import os
25
 
26
- # # Get the secret key from the environment
27
- # groq_key = os.environ.get('groq_api_key')
28
 
29
- # ## LLM used for RAG
30
- # from langchain_groq import ChatGroq
31
 
32
- # llm = ChatGroq(model="llama-3.1-70b-versatile",api_key=groq_key)
33
 
34
- # ## Embedding model!
35
- # from langchain_huggingface import HuggingFaceEmbeddings
36
- # embed_model = HuggingFaceEmbeddings(model_name="mixedbread-ai/mxbai-embed-large-v1")
37
 
38
- # # create vector store!
39
- # from langchain_chroma import Chroma
40
 
41
- # vectorstore = Chroma(
42
- # collection_name="tourism_dataset_store",
43
- # embedding_function=embed_model,
44
- # persist_directory="./",
45
- # )
46
 
47
- # # add data to vector nstore
48
- # vectorstore.add_texts(context_data)
49
 
50
- # retriever = vectorstore.as_retriever()
51
 
52
- # from langchain_core.prompts import PromptTemplate
53
 
54
- # template = ("""You are a Moroccan tourism expert.
55
- # Use the provided context to answer the question.
56
- # If you don't know the answer, say so. Explain your answer in detail.
57
- # Do not discuss the context in your response; just provide the answer directly.
58
- # Context: {context}
59
- # Question: {question}
60
- # Answer:""")
61
 
62
- # rag_prompt = PromptTemplate.from_template(template)
63
 
64
- # from langchain_core.output_parsers import StrOutputParser
65
- # from langchain_core.runnables import RunnablePassthrough
66
 
67
- # rag_chain = (
68
- # {"context": retriever, "question": RunnablePassthrough()}
69
- # | rag_prompt
70
- # | llm
71
- # | StrOutputParser()
72
- # )
73
 
74
- # import gradio as gr
75
 
76
- # def rag_memory_stream(text):
77
- # partial_text = ""
78
- # for new_text in rag_chain.stream(text):
79
- # partial_text += new_text
80
- # yield partial_text
81
 
82
- # examples = ['Tourist attraction sites in Morocco', 'What are some fun activities to do in Morocco?', 'What can I do in Marrakech 40000 Morocco?']
83
 
84
 
85
 
86
 
87
- # title = "Real-time AI App with Groq API and LangChain to Answer Morroco Tourism questions"
88
- # demo = gr.Interface(
89
- # title=title,
90
- # fn=rag_memory_stream,
91
- # inputs="text",
92
- # outputs="text",
93
- # examples=examples,
94
- # allow_flagging="never",
95
- # )
96
 
97
 
98
- # if __name__ == "__main__":
99
- # demo.launch()
 
 
 
 
 
1
 
2
import pandas as pd


def build_context_data(frame, n_cols=4):
    """Return one flat "col: value col: value ... " string per row of *frame*.

    Only the first *n_cols* columns are used. Values are formatted via
    f-strings (implicit str()), so non-string cells (numbers, NaN) no
    longer break the string concatenation the original loop relied on.
    Output format matches the original exactly, including the trailing
    space after the last value.
    """
    cols = list(frame.columns[:n_cols])
    return [
        " ".join(f"{col}: {row[col]}" for col in cols) + " "
        for _, row in frame[cols].iterrows()
    ]


# Load the tourism dataset and flatten each row into one retrieval chunk.
# These chunks become the documents indexed in the vector store below.
df = pd.read_json("./tourisme_chatbot.json")
context_data = build_context_data(df)
16
 
17
 
18
import os

# Get the secret key from the environment (HF Spaces secret).
# NOTE(review): if the secret is unset this stays None and ChatGroq will
# fail at request time — confirm the secret name 'groq_api_key'.
groq_key = os.environ.get('groq_api_key')

## LLM used for RAG
from langchain_groq import ChatGroq

llm = ChatGroq(model="llama-3.1-70b-versatile", api_key=groq_key)

## Embedding model used for both indexing and query-time retrieval.
from langchain_huggingface import HuggingFaceEmbeddings

embed_model = HuggingFaceEmbeddings(model_name="mixedbread-ai/mxbai-embed-large-v1")

# Create (or reopen) the Chroma vector store persisted in the app root.
from langchain_chroma import Chroma

vectorstore = Chroma(
    collection_name="tourism_dataset_store",
    embedding_function=embed_model,
    persist_directory="./",
)

# Add the flattened dataset rows to the vector store.
# NOTE(review): this re-embeds and re-inserts on every startup; with a
# persistent directory that duplicates documents — consider guarding.
vectorstore.add_texts(context_data)

retriever = vectorstore.as_retriever()
45
 
46
from langchain_core.prompts import PromptTemplate

# Prompt contract: answer only from the retrieved context, in detail,
# without meta-discussion of the context itself.
template = ("""You are a Moroccan tourism expert.
Use the provided context to answer the question.
If you don't know the answer, say so. Explain your answer in detail.
Do not discuss the context in your response; just provide the answer directly.
Context: {context}
Question: {question}
Answer:""")

rag_prompt = PromptTemplate.from_template(template)

from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnablePassthrough

# LCEL pipeline: fetch context via the retriever, pass the raw question
# through, fill the prompt, call the LLM, and reduce to a plain string.
rag_chain = (
    {"context": retriever, "question": RunnablePassthrough()}
    | rag_prompt
    | llm
    | StrOutputParser()
)
67
 
68
import gradio as gr


def rag_memory_stream(text):
    """Stream the RAG chain's answer incrementally for the Gradio UI.

    Yields the accumulated answer after each chunk so the interface
    updates token-by-token instead of waiting for the full completion.
    """
    partial_text = ""
    for new_text in rag_chain.stream(text):
        partial_text += new_text
        yield partial_text
75
 
76
# Canned queries shown under the input box.
examples = [
    'Tourist attraction sites in Morocco',
    'What are some fun activities to do in Morocco?',
    'What can I do in Marrakech 40000 Morocco?',
]

# "Morroco" typo fixed in the user-facing title.
title = "Real-time AI App with Groq API and LangChain to Answer Morocco Tourism questions"

# NOTE(review): allow_flagging is deprecated in Gradio 4.x in favor of
# flagging_mode — confirm the installed Gradio version before changing.
demo = gr.Interface(
    title=title,
    fn=rag_memory_stream,
    inputs="text",
    outputs="text",
    examples=examples,
    allow_flagging="never",
)


if __name__ == "__main__":
    demo.launch()