binqiangliu committed
Commit c97b3fb
1 Parent(s): 93e9504

Update app.py

Files changed (1)
  1. app.py +16 -32
app.py CHANGED
@@ -45,30 +45,6 @@ if "pdf_files" not in st.session_state:
 if "documents" not in st.session_state:
     st.session_state.documents = None
 
-if "embed_model" not in st.session_state:
-    st.session_state.embed_model = None
-
-if "llm_predictor" not in st.session_state:
-    st.session_state.llm_predictor = None
-
-if "service_context" not in st.session_state:
-    st.session_state.service_context = None
-
-if "new_index" not in st.session_state:
-    st.session_state.new_index = None
-
-if "storage_context" not in st.session_state:
-    st.session_state.storage_context = None
-
-if "loadedindex" not in st.session_state:
-    st.session_state.loadedindex = None
-
-if "query_engine" not in st.session_state:
-    st.session_state.query_engine = None
-
-if "user_question " not in st.session_state:
-    st.session_state.user_question = ""
-
 with st.sidebar:
     st.subheader("Upload your Documents Here: ")
     #if "pdf_files" not in st.session_state:
@@ -103,18 +79,22 @@ with st.sidebar:
 
     start_2 = timeit.default_timer() # Start timer
     st.write(f"向量模型加载开始:{start_2}")
-    st.session_state.embed_model = LangchainEmbedding(HuggingFaceEmbeddings(model_name='sentence-transformers/all-MiniLM-L6-v2'))
+    if "embed_model" not in st.session_state:
+        st.session_state.embed_model = LangchainEmbedding(HuggingFaceEmbeddings(model_name='sentence-transformers/all-MiniLM-L6-v2'))
     end_2 = timeit.default_timer() # Start timer
     st.write(f"向量模型加载加载结束:{end_2}")
     st.write(f"向量模型加载耗时:{end_2 - start_2}")
 
-    st.session_state.llm_predictor = LLMPredictor(HuggingFaceHub(repo_id="HuggingFaceH4/starchat-beta", model_kwargs={"min_length":100, "max_new_tokens":1024, "do_sample":True, "temperature":0.2,"top_k":50, "top_p":0.95, "eos_token_id":49155}))
+    if "llm_predictor" not in st.session_state:
+        st.session_state.llm_predictor = LLMPredictor(HuggingFaceHub(repo_id="HuggingFaceH4/starchat-beta", model_kwargs={"min_length":100, "max_new_tokens":1024, "do_sample":True, "temperature":0.2,"top_k":50, "top_p":0.95, "eos_token_id":49155}))
 
-    st.session_state.service_context = ServiceContext.from_defaults(llm_predictor=st.session_state.llm_predictor, embed_model=st.session_state.embed_model)
+    if "service_context" not in st.session_state:
+        st.session_state.service_context = ServiceContext.from_defaults(llm_predictor=st.session_state.llm_predictor, embed_model=st.session_state.embed_model)
 
     start_3 = timeit.default_timer() # Start timer
     st.write(f"向量库构建开始:{start_3}")
-    st.session_state.new_index = VectorStoreIndex.from_documents(
+    if "new_index" not in st.session_state:
+        st.session_state.new_index = VectorStoreIndex.from_documents(
         st.session_state.documents,
         service_context=st.session_state.service_context,
         )
@@ -124,18 +104,22 @@ st.write(f"向量库构建耗时:{end_3 - start_3}")
 
     st.session_state.new_index.storage_context.persist("st.session_state.directory_path")
 
-    st.session_state.storage_context = StorageContext.from_defaults(persist_dir="st.session_state.directory_path")
+    if "storage_context" not in st.session_state:
+        st.session_state.storage_context = StorageContext.from_defaults(persist_dir="st.session_state.directory_path")
 
     start_4 = timeit.default_timer() # Start timer
     st.write(f"向量库装载开始:{start_4}")
-    st.session_state.loadedindex = load_index_from_storage(storage_context=st.session_state.storage_context, service_context=st.session_state.service_context)
+    if "loadedindex" not in st.session_state:
+        st.session_state.loadedindex = load_index_from_storage(storage_context=st.session_state.storage_context, service_context=st.session_state.service_context)
     end_4 = timeit.default_timer() # Start timer
     st.write(f"向量库装载结束:{end_4}")
     st.write(f"向量库装载耗时:{end_4 - start_4}")
 
-    st.session_state.query_engine = st.session_state.loadedindex.as_query_engine()
+    if "query_engine" not in st.session_state:
+        st.session_state.query_engine = st.session_state.loadedindex.as_query_engine()
 
-    st.session_state.user_question=st.text_input("Enter your query:")
+    if "user_question " not in st.session_state:
+        st.session_state.user_question = st.text_input("Enter your query:")
     if st.session_state.user_question !="" and not st.session_state.user_question.strip().isspace() and not st.session_state.user_question == "" and not st.session_state.user_question.strip() == "" and not st.session_state.user_question.isspace():
         print("user question: "+st.session_state.user_question)
         with st.spinner("AI Thinking...Please wait a while to Cheers!"):
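
The pattern this commit applies is Streamlit's guarded lazy initialization: instead of resetting every key to None at the top of the script, each expensive object is created only when its key is missing from st.session_state, so it survives the rerun Streamlit triggers on every widget interaction. A minimal, runnable sketch of the idiom (the resource name and builder function are illustrative, not taken from app.py):

import streamlit as st

def build_embed_model():
    # Stand-in for an expensive constructor, e.g. loading the
    # sentence-transformers model used in app.py.
    return {"name": "all-MiniLM-L6-v2"}

# Create the object once per browser session; later reruns reuse it.
if "embed_model" not in st.session_state:
    st.session_state.embed_model = build_embed_model()

st.write("embed model ready:", st.session_state.embed_model["name"])

Note that the guard only helps when the checked key matches the assigned key exactly; the "user_question " check in the last hunk carries a trailing space while the assignment writes to user_question, so that text_input line still runs on every rerun.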
 
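
For orientation, the guarded calls in the last two hunks follow the legacy llama_index build-persist-reload flow. Below is a condensed sketch of that flow outside Streamlit, assuming a pre-0.10 llama_index (where ServiceContext and LLMPredictor still exist), langchain installed, a HUGGINGFACEHUB_API_TOKEN in the environment, and an illustrative docs/ folder and index_storage path; unlike app.py, it passes the persist directory as a variable rather than the quoted string "st.session_state.directory_path".

from langchain.embeddings import HuggingFaceEmbeddings
from langchain.llms import HuggingFaceHub
from llama_index import (
    LangchainEmbedding,
    LLMPredictor,
    ServiceContext,
    SimpleDirectoryReader,
    StorageContext,
    VectorStoreIndex,
    load_index_from_storage,
)

persist_dir = "index_storage"  # illustrative location, not the one in app.py

# Embedding model and hosted LLM wrapped for llama_index, mirroring the diff.
embed_model = LangchainEmbedding(
    HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
)
llm_predictor = LLMPredictor(
    HuggingFaceHub(
        repo_id="HuggingFaceH4/starchat-beta",
        model_kwargs={"max_new_tokens": 1024, "temperature": 0.2},
    )
)
service_context = ServiceContext.from_defaults(
    llm_predictor=llm_predictor, embed_model=embed_model
)

# Build the index from local documents, persist it, then reload and query it.
documents = SimpleDirectoryReader("docs").load_data()  # assumed input folder
index = VectorStoreIndex.from_documents(documents, service_context=service_context)
index.storage_context.persist(persist_dir)

storage_context = StorageContext.from_defaults(persist_dir=persist_dir)
loaded_index = load_index_from_storage(
    storage_context=storage_context, service_context=service_context
)
query_engine = loaded_index.as_query_engine()
print(query_engine.query("What do the uploaded documents cover?"))

In app.py these steps run inside the Streamlit script itself, which is why each assignment is now wrapped in a session-state guard: without it the model loading, index build, and reload would repeat on every rerun.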