maulana-m committed
Commit 5463230 · 1 Parent(s): 7452ade

Upload app.py

Files changed (1)
  1. app.py +111 -0
app.py ADDED
@@ -0,0 +1,111 @@
+ import gradio as gr
+ import json
+ from uuid import uuid4
+ import requests
+ from pydantic import BaseModel
+ from typing import List
+ import ast
+ import os
+
+
+ QUEUE_MAX_SIZE = int(os.getenv("QUEUE_MAX_SIZE", 20))
+ QUEUE_CONCURRENCY_COUNT = int(os.getenv("QUEUE_CONCURRENCY_COUNT", 10))
+ USERNAME = os.getenv("USERNAME")
+ PASSWORD = os.getenv("PASSWORD")
+
+
+ class LearningBotRequest(BaseModel):
+     message: List[dict]
+     persona: str
+     session_id: str
+     context: dict
+     user_serial: str
+
+
+ def generate_uuid():
+     return str(uuid4())
+
+
+ def construct_message(list_message):
+     # Flatten [user, bot] chat pairs into a list of single-turn dicts.
+     messages = []
+     for pair_message in list_message:
+         if len(pair_message) < 2:
+             continue
+         content_user = {"human": pair_message[0]}
+         content_ai = {"AI": pair_message[1]}
+         messages.append(content_user)
+         messages.append(content_ai)
+
+     return messages
+
+
+ def send_message(url, request):
+     response = requests.post(url, data=request.json(), headers={"Content-Type": "application/json"})
+     if response.status_code != 200:
+         raise gr.Error(response.text)
+     result = response.json()["data"]["reply"]
+     return result
+
+
+ def respond(chat_history, message, session_id, user_serial, persona, context, endpoint):
+     if session_id is None:
+         session_id = generate_uuid()
+     chat_history = chat_history or []  # Chatbot value is None right after "Clear"
+     context = ast.literal_eval(context) if context else {}
+     messages = construct_message(chat_history)
+     messages.append(
+         {"human": message}
+     )
+
+     request = LearningBotRequest(
+         message=messages,
+         persona=persona,
+         session_id=session_id,
+         context=context,
+         user_serial=user_serial
+     )
+
+     response = send_message(endpoint, request)
+
+     return chat_history + [[message, response]], "Success", session_id
+
+
+ def reset_textbox():
+     return gr.update(value='')
+
+
+ with gr.Blocks() as demo:
+     with gr.Row():
+         with gr.Column(scale=5):
+             clear = gr.Button("Clear all conversations")
+         with gr.Column(scale=5):
+             endpoint = gr.Textbox(label="Endpoint API")
+     with gr.Accordion("Parameters", open=False):
+         user_serial = gr.Textbox(label="User serial")
+         context = gr.Textbox(label="context", value="{}")
+         persona = gr.Textbox(label="persona")
+
+     chatbot = gr.Chatbot()
+     message = gr.Textbox(placeholder="Halo kak, aku mau bertanya", label="Chat Here")
+     session_id = gr.State(value=generate_uuid())
+     with gr.Row():
+         with gr.Column(scale=5):
+             send = gr.Button("Send")
+         with gr.Column(scale=5):
+             status_box = gr.Textbox(label="Status")
+
+     message.submit(respond, [chatbot, message, session_id, user_serial, persona, context, endpoint], [chatbot, status_box, session_id])
+
+     clear.click(lambda: None, None, chatbot, queue=False)
+     clear.click(lambda: None, None, session_id, queue=False)
+
+     send.click(respond, [chatbot, message, session_id, user_serial, persona, context, endpoint], [chatbot, status_box, session_id])
+     send.click(reset_textbox, [], [message])
+     message.submit(reset_textbox, [], [message])
+
+ (
+     demo
+     .queue(max_size=QUEUE_MAX_SIZE, concurrency_count=QUEUE_CONCURRENCY_COUNT)
+     # .launch(auth=(USERNAME, PASSWORD), debug=True)
+ )
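
For reference, `send_message` posts the serialized `LearningBotRequest` as a JSON body and reads `["data"]["reply"]` from the response. The backend contract is not documented in this commit, so the sketch below only illustrates the shapes the app appears to assume; all field values are placeholders.

    # Hypothetical sketch of the request/response shapes implied by respond() and send_message().
    # Request body produced by LearningBotRequest(...).json():
    example_request_body = {
        "message": [{"human": "previous user turn"}, {"AI": "previous bot turn"}, {"human": "new user turn"}],
        "persona": "tutor",
        "session_id": "a-uuid4-string",
        "context": {},
        "user_serial": "user-123",
    }
    # Response the endpoint is expected to return; only data.reply is read by the app:
    expected_response = {
        "data": {
            "reply": "text shown as the bot's message in the Chatbot component"
        }
    }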