vpcom committed
Commit 2667d9e · 1 Parent(s): 8cf621f

fix: stream not supported for this model

Files changed (1): app.py +2 -2
app.py CHANGED
@@ -39,7 +39,7 @@ Falcon:"""
 seed = 42
 
 def generate(
-    prompt, history, system_prompt="<|endoftext|>", temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0,
+    prompt, history, system_prompt="<|endoftext|>", temperature=0.9, max_new_tokens=250, top_p=0.95, repetition_penalty=1.0,
 ):
     temperature = float(temperature)
     if temperature < 1e-2:
@@ -58,7 +58,7 @@ def generate(
     seed = seed + 1
     formatted_prompt = format_prompt(prompt, history, system_prompt)
 
-    stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
+    stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=False, details=True, return_full_text=False)
     output = ""
 
     for response in stream:
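
For context, a minimal sketch of the API difference this commit works around, assuming client is huggingface_hub's InferenceClient (the model id and prompt below are illustrative, not from the commit): with stream=False, text_generation returns a single response object rather than an iterator of token events, so code that previously looped over a stream reads the full generated_text instead.

from huggingface_hub import InferenceClient

# Assumption: the app talks to a Falcon endpoint; this model id is illustrative.
client = InferenceClient("tiiuae/falcon-7b-instruct")

# Non-streaming call (what the commit switches to): with details=True this
# returns one response object whose generated_text holds the full output.
response = client.text_generation(
    "Hello, Falcon!",
    max_new_tokens=250,
    stream=False,
    details=True,
    return_full_text=False,
)
print(response.generated_text)

# Streaming call (supported only by some models/backends, hence the fix):
# yields incremental token events, each exposing new text via token.text.
for event in client.text_generation("Hello, Falcon!", stream=True, details=True):
    print(event.token.text, end="")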