thinkall committed
Commit c977014 · 1 Parent(s): ddae919

Update state

Files changed (2)
  1. .gitignore +1 -0
  2. app.py +38 -27
.gitignore ADDED
@@ -0,0 +1 @@
+.cache
app.py CHANGED
@@ -12,8 +12,7 @@ from autogen.agentchat.contrib.retrieve_user_proxy_agent import (
 )
 
 
-def initialize_agents(docs_path=None):
-    global config_list
+def initialize_agents(config_list, docs_path=None):
     if isinstance(config_list, gr.State):
         _config_list = config_list.value
     else:
@@ -45,8 +44,8 @@ def initialize_agents(docs_path=None):
     return assistant, ragproxyagent
 
 
-def initiate_chat(problem, queue, n_results=3):
-    global assistant, ragproxyagent, config_list
+def initiate_chat(config_list, problem, queue, n_results=3):
+    global assistant, ragproxyagent
     if isinstance(config_list, gr.State):
         _config_list = config_list.value
     else:
@@ -55,13 +54,13 @@ def initiate_chat(problem, queue, n_results=3):
         queue.put(["Please set the LLM config first"])
         return
     else:
-        llm_config ={
-            "request_timeout": 600,
+        llm_config = (
+            {
+                "request_timeout": 120,
                 "seed": 42,
                 "config_list": _config_list,
             },
-        print(llm_config, type(llm_config))
-        print(assistant.llm_config, type(assistant.llm_config))
+        )
         assistant.llm_config.update(llm_config[0])
         assistant.reset()
         ragproxyagent.initiate_chat(
@@ -79,7 +78,7 @@ def chatbot_reply(input_text):
     queue = mp.Queue()
     process = mp.Process(
         target=initiate_chat,
-        args=(input_text, queue),
+        args=(config_list, input_text, queue),
     )
     process.start()
     process.join()
@@ -97,7 +96,7 @@ def get_description_text():
     """
 
 
-global config_list, assistant, ragproxyagent
+global assistant, ragproxyagent
 
 with gr.Blocks() as demo:
     config_list, assistant, ragproxyagent = (
@@ -115,6 +114,7 @@ with gr.Blocks() as demo:
         None,
         None,
     )
+    assistant, ragproxyagent = initialize_agents(config_list)
 
     gr.Markdown(get_description_text())
     chatbot = gr.Chatbot(
@@ -135,7 +135,6 @@ with gr.Blocks() as demo:
     with gr.Row():
 
         def upload_file(file):
-            global config_list, assistant, ragproxyagent
             update_context_url(file.name)
 
         upload_button = gr.UploadButton(
@@ -148,21 +147,36 @@ with gr.Blocks() as demo:
         def update_config(config_list):
             global assistant, ragproxyagent
             config_list = autogen.config_list_from_models(
-                model_list=[os.environ["MODEL"]]
+                model_list=[os.environ.get("MODEL", "gpt-35-turbo")],
             )
-            print(config_list, type(config_list))
-            assistant, ragproxyagent = (
-                initialize_agents() if config_list else (None, None)
+            if not config_list:
+                config_list = [
+                    {
+                        "api_key": "",
+                        "api_base": "",
+                        "api_type": "azure",
+                        "api_version": "2023-07-01-preview",
+                        "model": "gpt-35-turbo",
+                    }
+                ]
+            print("config_list: ", config_list)
+            llm_config = (
+                {
+                    "request_timeout": 120,
+                    "seed": 42,
+                    "config_list": config_list,
+                },
             )
+            assistant.llm_config.update(llm_config[0])
+            ragproxyagent._model = config_list[0]["model"]
             return config_list
 
         def set_params(model, oai_key, aoai_key, aoai_base):
-            global config_list, assistant, ragproxyagent
             os.environ["MODEL"] = model
             os.environ["OPENAI_API_KEY"] = oai_key
             os.environ["AZURE_OPENAI_API_KEY"] = aoai_key
             os.environ["AZURE_OPENAI_API_BASE"] = aoai_base
-            config_list = update_config(config_list)
+            print("model: ", model, "oai_key: ", oai_key, "aoai_key: ", aoai_key, "aoai_base: ", aoai_base)
             return model, oai_key, aoai_key, aoai_base
 
         txt_model = gr.Dropdown(
@@ -173,7 +187,7 @@ with gr.Blocks() as demo:
                 "gpt-3.5-turbo",
             ],
             allow_custom_value=True,
-            default_value="gpt-35-turbo",
+            value="gpt-35-turbo",
             container=True,
         )
         txt_oai_key = gr.Textbox(
@@ -203,12 +217,6 @@ with gr.Blocks() as demo:
             container=True,
             type="password",
         )
-        set_params_button = gr.Button(value="Set Params", type="button")
-        set_params_button.click(
-            set_params,
-            [txt_model, txt_oai_key, txt_aoai_key, txt_aoai_base_url],
-            [txt_model, txt_oai_key, txt_aoai_key, txt_aoai_base_url],
-        )
 
         clear = gr.ClearButton([txt_input, chatbot])
 
@@ -231,7 +239,10 @@ with gr.Blocks() as demo:
             layout={"height": 20},
         )
 
-        def respond(message, chat_history):
+        def respond(message, chat_history, model, oai_key, aoai_key, aoai_base):
+            global config_list
+            set_params(model, oai_key, aoai_key, aoai_base)
+            config_list = update_config(config_list)
            messages = chatbot_reply(message)
            chat_history.append(
                (message, messages[-1] if messages[-1] != "TERMINATE" else messages[-2])
@@ -248,10 +259,10 @@ with gr.Blocks() as demo:
                shutil.rmtree("/tmp/chromadb/")
            except:
                pass
-           assistant, ragproxyagent = initialize_agents(docs_path=context_url)
+           assistant, ragproxyagent = initialize_agents(config_list, docs_path=context_url)
            return context_url
 
-       txt_input.submit(respond, [txt_input, chatbot], [txt_input, chatbot])
+       txt_input.submit(respond, [txt_input, chatbot, txt_model, txt_oai_key, txt_aoai_key, txt_aoai_base_url], [txt_input, chatbot])
        txt_prompt.submit(update_prompt, [txt_prompt], [txt_prompt])
        txt_context_url.submit(update_context_url, [txt_context_url], [txt_context_url])
 
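Note: the thread running through most of these hunks is that initiate_chat no longer reads module globals; chatbot_reply forwards config_list into the worker explicitly, because a process started via multiprocessing does not share the parent's in-memory state. Below is a minimal, self-contained sketch of that pattern; worker and reply are hypothetical stand-ins for the app's initiate_chat and chatbot_reply, not the app's actual code.

import multiprocessing as mp


def worker(config_list, problem, queue):
    # Stand-in for initiate_chat(config_list, problem, queue): everything the
    # child needs arrives as arguments, and results go back through the queue.
    queue.put([f"model={config_list[0]['model']}", f"answered: {problem}"])


def reply(config_list, problem):
    # Stand-in for chatbot_reply(): run the worker in a separate process,
    # then drain whatever it put on the queue once it has finished.
    queue = mp.Queue()
    process = mp.Process(target=worker, args=(config_list, problem, queue))
    process.start()
    process.join()
    messages = []
    while not queue.empty():
        messages.extend(queue.get())
    return messages


if __name__ == "__main__":
    cfg = [{"model": "gpt-35-turbo", "api_key": ""}]  # shape mirrors the fallback config_list above
    print(reply(cfg, "What is AutoGen?"))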
 
 
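Note: on the Gradio side, the separate "Set Params" button is dropped and the model/key widgets are passed directly as extra inputs to the submit handler, so each message re-applies the current settings; default_value= on the Dropdown also becomes the supported value= keyword. A rough sketch of that wiring, assuming a recent Gradio release; respond here is a hypothetical stand-in, not the app's function.

import gradio as gr


def respond(message, chat_history, model, oai_key):
    # Stand-in handler: the extra components arrive as extra positional
    # arguments, so settings can be applied per message before replying.
    chat_history = chat_history + [(message, f"[{model}] echo: {message}")]
    return "", chat_history


with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    txt_model = gr.Dropdown(
        choices=["gpt-4", "gpt-35-turbo", "gpt-3.5-turbo"],
        value="gpt-35-turbo",  # value=, not default_value=
        allow_custom_value=True,
    )
    txt_oai_key = gr.Textbox(label="OpenAI API key", type="password")
    txt_input = gr.Textbox(label="Message")
    # Listing the widgets in the inputs list replaces the old Set Params button.
    txt_input.submit(
        respond,
        [txt_input, chatbot, txt_model, txt_oai_key],
        [txt_input, chatbot],
    )

if __name__ == "__main__":
    demo.launch()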