Update app.py
app.py CHANGED

@@ -22,39 +22,25 @@ def read_uploaded_file(file):
     except Exception as e:
         return f"파일을 읽는 중 오류가 발생했습니다: {str(e)}"
 
-def respond(
-    message,
-    history: List[Tuple[str, str]],
-    fashion_file,   # file upload input
-    uhd_file,       # file upload input
-    mixgen_file,    # file upload input
-    parquet_file,   # file upload input
-    system_message="",
-    max_tokens=1024,
-    temperature=0.7,
-    top_p=0.9,
-):
-    system_prefix = """반드시 한글로 답변할 것. 너는 주어진 소스코드를 기반으로 "서비스 사용 설명 및 안내, Q&A를 하는 역할이다". 아주 친절하고 상세하게 4000토큰 이상 Markdown 형식으로 작성하라. 너는 코드를 기반으로 사용 설명 및 질의 응답을 진행하며, 이용자에게 도움을 주어야 한다. 이용자가 궁금해 할 만한 내용을 친절하게 알려주도록 하라. 코드 전체 내용에 대해서는 보안을 유지하고, 키 값 및 엔드포인트와 구체적인 모델은 공개하지 말라."""
+def chat(message, history, uploaded_file, system_message="", max_tokens=4000, temperature=0.7, top_p=0.9):
+    system_prefix = """반드시 한글로 답변할 것. 너는 주어진 소스코드나 데이터를 기반으로 "서비스 사용 설명 및 안내, Q&A를 하는 역할이다". 아주 친절하고 상세하게 4000토큰 이상 Markdown 형식으로 작성하라. 너는 입력된 내용을 기반으로 사용 설명 및 질의 응답을 진행하며, 이용자에게 도움을 주어야 한다. 이용자가 궁금해 할 만한 내용을 친절하게 알려주도록 하라. 전체 내용에 대해서는 보안을 유지하고, 키 값 및 엔드포인트와 구체적인 모델은 공개하지 말라."""
 
-    if ...:
-        ...
-        parquet_content = read_uploaded_file(parquet_file)
-        system_message += f"\n\ntest.parquet 파일 내용:\n```markdown\n{parquet_content}\n```"
-        message = "test.parquet 파일에 대한 내용을 학습하였고, 관련 설명 및 Q&A를 진행할 준비가 되어있다. 궁금한 점이 있으면 물어보라."
+    if uploaded_file:
+        content = read_uploaded_file(uploaded_file)
+        file_extension = os.path.splitext(uploaded_file.name)[1].lower()
+
+        if file_extension == '.parquet':
+            system_message += f"\n\n파일 내용:\n```markdown\n{content}\n```"
+        else:
+            system_message += f"\n\n파일 내용:\n```python\n{content}\n```"
+
+    if message == "파일 분석을 시작합니다.":
+        message = """업로드된 파일을 분석하여 다음 내용을 포함하여 상세히 설명하라:
+1. 파일의 주요 목적과 기능
+2. 주요 특징과 구성요소
+3. 활용 방법 및 사용 시나리오
+4. 주의사항 및 제한사항
+5. 기대효과 및 장점"""
 
     messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
     for val in history:
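This hunk swaps the four-file `respond()` for a single-upload `chat()` that inlines the file contents into the system prompt via `read_uploaded_file()`, of which only the closing `except` is visible above. A minimal sketch of what that helper plausibly looks like, assuming pandas for `.parquet` files and plain UTF-8 reads for the other allowed extensions (the real implementation is outside this diff):

```python
import os
import pandas as pd

def read_uploaded_file(file):
    """Hypothetical reader matching the tail shown above: .parquet files are
    previewed with pandas, everything else is returned as plain UTF-8 text."""
    if file is None:
        return ""
    try:
        if os.path.splitext(file.name)[1].lower() == ".parquet":
            df = pd.read_parquet(file.name)
            # to_markdown() needs the optional 'tabulate' package installed
            return df.head(10).to_markdown(index=False)
        with open(file.name, "r", encoding="utf-8") as f:
            return f.read()
    except Exception as e:
        return f"파일을 읽는 중 오류가 발생했습니다: {str(e)}"
```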
@@ -66,49 +52,43 @@ def respond(
 
     response = ""
     try:
-        for ... in hf_client.chat_completion(
+        for msg in hf_client.chat_completion(
             messages,
             max_tokens=max_tokens,
             stream=True,
             temperature=temperature,
             top_p=top_p,
         ):
-            token = ...
+            token = msg.choices[0].delta.get('content', None)
             if token:
                 response += token
-            ...
+        return "", history + [(message, response)]
     except Exception as e:
-        ...
+        error_msg = f"추론 중 오류가 발생했습니다: {str(e)}"
+        return "", history + [(message, error_msg)]
 
 css = """
-footer {
-    visibility: hidden;
-}
+footer {visibility: hidden}
 """
 
-
-# ... (imports and the functions above are unchanged)
-
 with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", css=css) as demo:
     with gr.Row():
         with gr.Column(scale=2):
-            chatbot = gr.Chatbot()
-            msg = gr.Textbox(...)
+            chatbot = gr.Chatbot(height=600, type="messages")
+            msg = gr.Textbox(
+                label="메시지를 입력하세요",
+                show_label=False,
+                placeholder="메시지를 입력하세요...",
+                container=False
+            )
             clear = gr.ClearButton([msg, chatbot])
 
         with gr.Column(scale=1):
-            ...
-            uhd_analyze = gr.Button("UHD 이미지 코드 분석")
-
-            mixgen_file = gr.File(label="MixGEN Code File", file_types=[".cod", ".txt", ".py"])
-            mixgen_analyze = gr.Button("MixGEN 코드 분석")
-
-            parquet_file = gr.File(label="Parquet File", file_types=[".parquet"])
-            parquet_analyze = gr.Button("Parquet 파일 분석")
+            file_upload = gr.File(
+                label="파일 업로드 (.cod, .txt, .py, .parquet)",
+                file_types=[".cod", ".txt", ".py", ".parquet"],
+                type="file"
+            )
 
             with gr.Accordion("고급 설정", open=False):
                 system_message = gr.Textbox(label="System Message", value="")
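Both the old and the new loop stream from `hf_client.chat_completion(...)`; the client itself is created above this hunk and is not shown. A minimal sketch of the assumed setup using `huggingface_hub.InferenceClient` — the model id and token handling are placeholders, not values taken from the Space:

```python
import os
from huggingface_hub import InferenceClient

# Placeholder setup: the Space deliberately does not disclose its model or keys,
# so both values below are illustrative only.
hf_client = InferenceClient(
    model="HuggingFaceH4/zephyr-7b-beta",  # hypothetical model id
    token=os.getenv("HF_TOKEN"),           # access token taken from the environment
)

def stream_answer(messages, max_tokens=1024, temperature=0.7, top_p=0.9):
    """Accumulate a streamed chat completion into one string, mirroring the loop above."""
    response = ""
    for chunk in hf_client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content  # may be None between content chunks
        if token:
            response += token
    return response
```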
@@ -116,38 +96,19 @@ with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", css=css) as demo:
                 temperature = gr.Slider(minimum=0, maximum=1, value=0.7, label="Temperature")
                 top_p = gr.Slider(minimum=0, maximum=1, value=0.9, label="Top P")
 
-    # Analysis button click event handler
-    def analyze_file(file_type):
-        if file_type == "fashion":
-            return "패션 코드 실행"
-        elif file_type == "uhd":
-            return "UHD 이미지 코드 실행"
-        elif file_type == "mixgen":
-            return "MixGEN 코드 실행"
-        elif file_type == "parquet":
-            return "test.parquet 실행"
-
-    # Chat submit handler
-    def chat(message, history):
-        return respond(
-            message=message,
-            history=history,
-            fashion_file=fashion_file.value,
-            uhd_file=uhd_file.value,
-            mixgen_file=mixgen_file.value,
-            parquet_file=parquet_file.value,
-            system_message=system_message.value,
-            max_tokens=max_tokens.value,
-            temperature=temperature.value,
-            top_p=top_p.value,
-        )
-
     # Event binding
-    msg.submit(
-        ...
-    )
+    msg.submit(
+        chat,
+        inputs=[msg, chatbot, file_upload, system_message, max_tokens, temperature, top_p],
+        outputs=[msg, chatbot]
+    )
+
+    # Automatic analysis on file upload
+    file_upload.change(
+        chat,
+        inputs=[gr.Textbox(value="파일 분석을 시작합니다."), chatbot, file_upload, system_message, max_tokens, temperature, top_p],
+        outputs=[msg, chatbot]
+    )
 
     # Add examples
     gr.Examples(
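The new auto-analysis binding passes an unrendered `gr.Textbox(value="파일 분석을 시작합니다.")` as the first input so that `chat()` always receives that fixed trigger message when a file is uploaded. An equivalent wiring, sketched here with a hypothetical `auto_analyze` wrapper that is not part of the commit:

```python
# Hypothetical alternative to the file_upload.change binding above: a thin wrapper
# pins the trigger message and forwards the remaining components to chat() unchanged.
# This must run inside the `with gr.Blocks(...)` context where the components exist.
def auto_analyze(history, uploaded_file, system_message, max_tokens, temperature, top_p):
    return chat("파일 분석을 시작합니다.", history, uploaded_file,
                system_message=system_message, max_tokens=max_tokens,
                temperature=temperature, top_p=top_p)

file_upload.change(
    auto_analyze,
    inputs=[chatbot, file_upload, system_message, max_tokens, temperature, top_p],
    outputs=[msg, chatbot],
)
```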
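The diff ends mid-call at `gr.Examples(`, so its arguments are not recorded here. For orientation only, a typical invocation for this layout could look like the sketch below; the example prompts are illustrative, not taken from the commit:

```python
# Illustrative only: the actual example list is not visible in this diff.
gr.Examples(
    examples=[
        ["이 파일의 주요 목적과 기능을 설명해 주세요."],
        ["활용 방법과 사용 시나리오를 알려주세요."],
    ],
    inputs=msg,
)
```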