BotifyCloudAdmin committed on
Commit
abca416
·
verified ·
1 Parent(s): 94eef02

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +35 -25
app.py CHANGED
@@ -24,6 +24,9 @@ def respond(
24
  temperature: float,
25
  top_p: float,
26
  ):
 
 
 
27
  messages = [{"role": "system", "content": system_message}]
28
  for user_msg, assistant_msg in history:
29
  if user_msg:
@@ -35,30 +38,34 @@ def respond(
35
  response = ""
36
  citations = []
37
 
38
- stream = px_client.chat.completions.create(
39
- model=AVAILABLE_MODELS[model_choice],
40
- messages=messages,
41
- max_tokens=max_tokens,
42
- temperature=temperature,
43
- top_p=top_p,
44
- stream=True,
45
- )
46
-
47
- for chunk in stream:
48
- if "choices" in chunk:
49
- token = chunk.choices[0].delta.content or ""
50
- response += token
51
- yield response # Stream response as it arrives
52
- if "citations" in chunk:
53
- citations = chunk["citations"]
54
-
55
- # Append citations as clickable links
56
- if citations:
57
- citation_text = "\n\nSources:\n" + "\n".join(
58
- [f"[{i+1}] [{url}]({url})" for i, url in enumerate(citations)]
59
  )
60
- response += citation_text
61
- yield response
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
62
 
63
  def check_password(input_password):
64
  if input_password == PASSWORD:
@@ -83,7 +90,7 @@ with gr.Blocks() as demo:
83
  chat = gr.ChatInterface(
84
  respond,
85
  chatbot=gr.Chatbot(height=400), # Set the desired height here
86
- additional_inputs=[]
87
  )
88
 
89
  with gr.Column():
@@ -102,9 +109,12 @@ with gr.Blocks() as demo:
102
  minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"
103
  )
104
 
 
 
 
105
  submit_button.click(
106
  check_password, inputs=password_input, outputs=[password_input, chat_interface]
107
  )
108
 
109
  if __name__ == "__main__":
110
- demo.launch(share=True)
 
24
  temperature: float,
25
  top_p: float,
26
  ):
27
+ if model_choice not in AVAILABLE_MODELS:
28
+ return "Error: Invalid model selection."
29
+
30
  messages = [{"role": "system", "content": system_message}]
31
  for user_msg, assistant_msg in history:
32
  if user_msg:
 
38
  response = ""
39
  citations = []
40
 
41
+ try:
42
+ stream = px_client.chat.completions.create(
43
+ model=AVAILABLE_MODELS[model_choice],
44
+ messages=messages,
45
+ max_tokens=max_tokens,
46
+ temperature=temperature,
47
+ top_p=top_p,
48
+ stream=True,
 
 
 
 
 
 
 
 
 
 
 
 
 
49
  )
50
+
51
+ for chunk in stream:
52
+ if hasattr(chunk, "choices") and chunk.choices:
53
+ token = chunk.choices[0].delta.content or ""
54
+ response += token
55
+ yield response # Stream response as it arrives
56
+ if hasattr(chunk, "citations") and chunk.citations:
57
+ citations = chunk.citations
58
+
59
+ # Append citations as clickable links
60
+ if citations:
61
+ citation_text = "\n\nSources:\n" + "\n".join(
62
+ [f"[{i+1}] [{url}]({url})" for i, url in enumerate(citations)]
63
+ )
64
+ response += citation_text
65
+ yield response
66
+
67
+ except Exception as e:
68
+ yield f"Error: {str(e)}"
69
 
70
  def check_password(input_password):
71
  if input_password == PASSWORD:
 
90
  chat = gr.ChatInterface(
91
  respond,
92
  chatbot=gr.Chatbot(height=400), # Set the desired height here
93
+ additional_inputs=[system_prompt], # Include system message explicitly
94
  )
95
 
96
  with gr.Column():
 
109
  minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"
110
  )
111
 
112
+ # Update chat interface to include additional inputs
113
+ chat.additional_inputs.extend([model_choice, max_tokens, temperature, top_p])
114
+
115
  submit_button.click(
116
  check_password, inputs=password_input, outputs=[password_input, chat_interface]
117
  )
118
 
119
  if __name__ == "__main__":
120
+ demo.launch()