Omnibus committed on
Commit
d9cf5d2
·
verified ·
1 Parent(s): 7b316de

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +30 -29
app.py CHANGED
@@ -133,36 +133,36 @@ def run_gpt_no_prefix(
133
  **prompt_kwargs,
134
  ):
135
  print(seed)
136
- timestamp=datetime.datetime.now()
137
-
138
- generate_kwargs = dict(
139
- temperature=0.9,
140
- max_new_tokens=max_tokens,
141
- top_p=0.95,
142
- repetition_penalty=1.0,
143
- do_sample=True,
144
- seed=seed,
145
- )
146
-
147
- content = prompt_template.format(**prompt_kwargs)
148
- #if VERBOSE:
149
- print(LOG_PROMPT.format(content))
 
 
 
150
 
 
 
 
 
 
151
 
152
- #formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
153
- #formatted_prompt = format_prompt(f'{content}', history)
154
-
155
- stream = client.text_generation(content, **generate_kwargs, stream=True, details=True, return_full_text=False)
156
- resp = ""
157
- for response in stream:
158
- resp += response.token.text
159
- #yield resp
160
-
161
- #if VERBOSE:
162
- print(LOG_RESPONSE.format(resp))
163
- return resp
164
-
165
-
166
  def run_gpt(
167
  prompt_template,
168
  stop_tokens,
@@ -304,13 +304,14 @@ def get_chart(inp):
304
  try:
305
  resp = run_gpt_no_prefix(
306
  GET_CHART,
307
- stop_tokens=["observation:", "task:", "action:", "thought:"],
308
  max_tokens=8192,
309
  seed=seed,
310
  inp=inp,
311
  ).strip("\n")
312
  print(resp)
313
  except Exception as e:
 
314
  resp = e
315
  return resp
316
  def summarize(inp,history,report_check,chart_check,data=None,files=None,url=None,pdf_url=None,pdf_batch=None):
 
133
  **prompt_kwargs,
134
  ):
135
  print(seed)
136
+ try:
137
+ generate_kwargs = dict(
138
+ temperature=0.9,
139
+ max_new_tokens=max_tokens,
140
+ top_p=0.95,
141
+ repetition_penalty=1.0,
142
+ do_sample=True,
143
+ seed=seed,
144
+ )
145
+
146
+ content = prompt_template.format(**prompt_kwargs)
147
+ #if VERBOSE:
148
+ print(LOG_PROMPT.format(content))
149
+
150
+
151
+ #formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
152
+ #formatted_prompt = format_prompt(f'{content}', history)
153
 
154
+ stream = client.text_generation(content, **generate_kwargs, stream=True, details=True, return_full_text=False)
155
+ resp = ""
156
+ for response in stream:
157
+ resp += response.token.text
158
+ #yield resp
159
 
160
+ #if VERBOSE:
161
+ print(LOG_RESPONSE.format(resp))
162
+ return resp
163
+ except Exception as e:
164
+ print(f'no_prefix_error:: {e}')
165
+ return "Error"
 
 
 
 
 
 
 
 
166
  def run_gpt(
167
  prompt_template,
168
  stop_tokens,
 
304
  try:
305
  resp = run_gpt_no_prefix(
306
  GET_CHART,
307
+ stop_tokens=[],
308
  max_tokens=8192,
309
  seed=seed,
310
  inp=inp,
311
  ).strip("\n")
312
  print(resp)
313
  except Exception as e:
314
+ print(f'Error:: {e}')
315
  resp = e
316
  return resp
317
  def summarize(inp,history,report_check,chart_check,data=None,files=None,url=None,pdf_url=None,pdf_batch=None):