Commit d192e97 (verified) · committed by BotifyCloudAdmin
1 Parent(s): fe5d313

Update app.py

Files changed (1):
  app.py  +16 -9
app.py CHANGED
@@ -33,7 +33,7 @@ def respond(
 
     response = ""
     citations = []
-
+
     stream = px_client.chat.completions.create(
         model=AVAILABLE_MODELS[model_choice],
         messages=messages,
@@ -42,7 +42,7 @@ def respond(
         top_p=top_p,
         stream=True,
     )
-
+
     for chunk in stream:
         if "choices" in chunk:
             token = chunk.choices[0].delta.content or ""
@@ -50,7 +50,7 @@ def respond(
             yield response  # Stream response as it arrives
         if "citations" in chunk:
             citations = chunk["citations"]
-
+
     # Append citations as clickable links
     if citations:
         citation_text = "\n\nSources:\n" + "\n".join(
@@ -65,7 +65,14 @@ def check_password(input_password):
     else:
         return gr.update(value="", interactive=True), gr.update(visible=False)
 
-with gr.Blocks() as demo:
+# Custom CSS to adjust the chat interface height
+custom_css = """
+#chat_interface .overflow-y-auto {
+    height: 600px;
+}
+"""
+
+with gr.Blocks(css=custom_css) as demo:
     with gr.Column():
         password_input = gr.Textbox(
             type="password", label="Enter Password", interactive=True
@@ -74,7 +81,7 @@ with gr.Blocks() as demo:
         error_message = gr.Textbox(
             label="Error", visible=False, interactive=False
         )
-
+
     with gr.Column(visible=False) as chat_interface:
         system_prompt = gr.Textbox(
             value="You are a helpful assistant.", label="System message"
@@ -82,9 +89,9 @@ with gr.Blocks() as demo:
         chat = gr.ChatInterface(
             respond,
             additional_inputs=[],
-            height=600  # Increased height for better visibility
+            elem_id="chat_interface"  # Assign an element ID for CSS targeting
         )
-
+
     with gr.Column():
         model_choice = gr.Dropdown(
             choices=list(AVAILABLE_MODELS.keys()),
@@ -100,10 +107,10 @@ with gr.Blocks() as demo:
         top_p = gr.Slider(
             minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"
         )
-
+
     submit_button.click(
         check_password, inputs=password_input, outputs=[password_input, chat_interface]
     )
 
 if __name__ == "__main__":
-    demo.launch(share=True)
+    demo.launch(share=True)
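
For context on the unchanged hunk lines: respond() accumulates streamed delta tokens into a growing string, yields it so the UI updates incrementally, and keeps whatever citations the provider attaches to a chunk. A rough sketch of that shape follows, assuming an OpenAI-compatible client object; the name respond_sketch, the client/model parameters, the citations attribute access, and the "[n] url" link format are illustrative assumptions, not code lifted from app.py.

def respond_sketch(client, model, messages, temperature=0.7, top_p=0.95):
    # Accumulate the streamed answer and any citations attached to chunks.
    response = ""
    citations = []
    stream = client.chat.completions.create(
        model=model,
        messages=messages,
        temperature=temperature,
        top_p=top_p,
        stream=True,  # deliver the answer as incremental chunks
    )
    for chunk in stream:
        if chunk.choices:
            response += chunk.choices[0].delta.content or ""
            yield response  # stream the partial answer to the UI
        # Perplexity-style chunks may carry source URLs; keep the latest set.
        citations = getattr(chunk, "citations", None) or citations
    if citations:
        # Append citations as clickable links (link format is illustrative).
        response += "\n\nSources:\n" + "\n".join(
            f"[{i + 1}] {url}" for i, url in enumerate(citations)
        )
        yield response

The commit itself swaps the removed height=600 argument for page-level CSS: gr.Blocks(css=...) injects a stylesheet, and elem_id gives the component a stable DOM id the selector can target. A minimal standalone sketch of that pattern, with an illustrative #chatbox id, echo handler, and 600px value:

import gradio as gr

# Page-level stylesheet; the #chatbox selector matches the component given
# elem_id="chatbox" below. The id and the 600px height are illustrative.
CUSTOM_CSS = """
#chatbox {
    height: 600px;
}
"""

with gr.Blocks(css=CUSTOM_CSS) as demo:
    # elem_id assigns a stable DOM id so the CSS rule above targets exactly
    # this component and nothing else on the page.
    chatbot = gr.Chatbot(elem_id="chatbox")
    msg = gr.Textbox(label="Message")

    def reply(message, history):
        # Placeholder handler standing in for the real respond() generator.
        return "", history + [(message, f"echo: {message}")]

    msg.submit(reply, inputs=[msg, chatbot], outputs=[msg, chatbot])

if __name__ == "__main__":
    demo.launch()

Sizing the component in CSS keeps the layout concern out of the Python call signature; the stylesheet only affects the element carrying that elem_id, so the other columns in the Blocks layout are untouched.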