# NOTE(review): removed a non-Python extraction artifact (file-size line,
# commit-hash blame column, and line-number gutter) that would raise a
# SyntaxError at module top.
import gradio as gr
from huggingface_hub import InferenceClient
import os
import pandas as pd
from typing import List, Tuple
# Inference API client: streams chat completions from a hosted Cohere
# Command-R+ model; auth token is read from the HF_TOKEN environment variable.
hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=os.getenv("HF_TOKEN"))
def read_uploaded_file(file):
    """Return a text preview of an uploaded file.

    Parquet files are rendered as a Markdown table of their first 10 rows;
    any other file is returned as UTF-8 text. Errors are reported as a
    human-readable string instead of raising, so the chat flow never breaks.

    Args:
        file: A Gradio upload — either a path string or a tempfile-like
            object exposing ``.name`` — or ``None`` when nothing was uploaded.

    Returns:
        str: The preview text, ``""`` for ``None``, or an error description.
    """
    if file is None:
        return ""
    try:
        # Gradio may hand us either a plain path string or a tempfile-like
        # wrapper; normalize to a filesystem path first.
        path = file if isinstance(file, str) else file.name
        if path.endswith('.parquet'):
            df = pd.read_parquet(path, engine='pyarrow')
            return df.head(10).to_markdown(index=False)
        # Re-open by name with an explicit encoding: calling .read() on the
        # object Gradio passes is mode/position dependent and fails outright
        # when the upload arrives as a path string.
        with open(path, 'r', encoding='utf-8') as f:
            return f.read()
    except Exception as e:
        return f"파일을 읽는 중 오류가 발생했습니다: {str(e)}"
def respond(
    message,
    history: List[Tuple[str, str]],
    fashion_file,   # file-upload input
    uhd_file,       # file-upload input
    mixgen_file,    # file-upload input
    parquet_file,   # file-upload input
    system_message="",
    max_tokens=1024,
    temperature=0.7,
    top_p=0.9,
):
    """Stream an assistant reply for the chat interface.

    Recognizes a few trigger phrases that load an uploaded file's contents
    into the system prompt, rebuilds the conversation from ``history``, and
    yields the growing response text as tokens stream in.

    Args:
        message: The latest user message.
        history: Prior (user, assistant) turn pairs.
        fashion_file / uhd_file / mixgen_file / parquet_file: Optional uploads.
        system_message: Extra system-prompt text appended to the fixed prefix.
        max_tokens / temperature / top_p: Generation parameters.

    Yields:
        str: The cumulative response so far (Gradio streaming convention).
    """
    # Fixed Korean persona/instruction prefix: answer in Korean, act as a
    # service guide/Q&A bot, and never reveal keys, endpoints, or model names.
    system_prefix = """반드시 한글로 답변할 것. 너는 주어진 소스코드를 기반으로 "서비스 사용 설명 및 안내, Q&A를 하는 역할이다". 아주 친절하고 상세하게 4000토큰 이상 Markdown 형식으로 작성하라. 너는 코드를 기반으로 사용 설명 및 질의 응답을 진행하며, 이용자에게 도움을 주어야 한다. 이용자가 궁금해 할 만한 내용에 친절하게 알려주도록 하라. 코드 전체 내용에 대해서는 보안을 유지하고, 키 값 및 엔드포인트와 구체적인 모델은 공개하지 마라."""

    # Trigger phrases: inject the matching upload into the system prompt and
    # replace the user message with a canned instruction for the model.
    if message.lower() == "패션 코드 실행" and fashion_file is not None:
        fashion_content = read_uploaded_file(fashion_file)
        system_message += f"\n\n패션 코드 내용:\n```python\n{fashion_content}\n```"
        message = "패션 가상피팅에 대한 내용을 학습하였고, 설명할 준비가 되어있다고 알리고 서비스 URL(https://aiqcamp-fash.hf.space)을 통해 테스트 해보라고 출력하라."
    elif message.lower() == "uhd 이미지 코드 실행" and uhd_file is not None:
        uhd_content = read_uploaded_file(uhd_file)
        system_message += f"\n\nUHD 이미지 코드 내용:\n```python\n{uhd_content}\n```"
        message = "UHD 이미지 생성에 대한 내용을 학습하였고, 설명할 준비가 되어있다고 알리고 서비스 URL(https://openfree-ultpixgen.hf.space)을 통해 테스트 해보라고 출력하라."
    elif message.lower() == "mixgen 코드 실행" and mixgen_file is not None:
        mixgen_content = read_uploaded_file(mixgen_file)
        system_message += f"\n\nMixGEN 코드 내용:\n```python\n{mixgen_content}\n```"
        message = "MixGEN3 이미지 생성에 대한 내용을 학습하였고, 설명할 준비가 되어있다고 알리고 서비스 URL(https://openfree-mixgen3.hf.space)을 통해 테스트 해보라고 출력하라."
    elif message.lower() == "test.parquet 실행" and parquet_file is not None:
        parquet_content = read_uploaded_file(parquet_file)
        system_message += f"\n\ntest.parquet 파일 내용:\n```markdown\n{parquet_content}\n```"
        message = "test.parquet 파일에 대한 내용을 학습하였고, 관련 설명 및 Q&A를 진행할 준비가 되어있다. 궁금한 점이 있으면 물어보라."

    # Rebuild the full conversation for the chat-completion API.
    messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
    for user_turn, assistant_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})

    response = ""
    try:
        # 'chunk' (was 'message') avoids shadowing the user-message parameter.
        for chunk in hf_client.chat_completion(
            messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            # NOTE(review): assumes the streamed delta behaves like a dict
            # with .get — confirm against the installed huggingface_hub
            # version (newer releases expose delta.content as an attribute).
            token = chunk.choices[0].delta.get('content', None)
            if token:
                response += token
                yield response
    except Exception as e:
        yield f"추론 중 오류가 발생했습니다: {str(e)}"
# Gradio chat interface: file uploads + generation controls feed respond()
# as additional inputs, in the exact order its parameters expect.
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.File(label="Fashion Code File", file_types=[".cod", ".txt", ".py"]),
        gr.File(label="UHD Image Code File", file_types=[".cod", ".txt", ".py"]),
        gr.File(label="MixGEN Code File", file_types=[".cod", ".txt", ".py"]),
        gr.File(label="Parquet File", file_types=[".parquet"]),
        gr.Textbox(label="System Message", value=""),
        gr.Slider(minimum=1, maximum=8000, value=4000, label="Max Tokens"),
        gr.Slider(minimum=0, maximum=1, value=0.7, label="Temperature"),
        gr.Slider(minimum=0, maximum=1, value=0.9, label="Top P"),
    ],
    examples=[
        # The first four must match respond()'s trigger phrases exactly.
        ["패션 코드 실행"],
        ["UHD 이미지 코드 실행"],
        ["MixGEN 코드 실행"],
        ["test.parquet 실행"],
        ["자세한 사용 방법을 마치 화면을 보면서 설명하듯이 4000 토큰 이상 자세히 설명하라"],
        ["FAQ 20건을 상세하게 작성하라. 4000토큰 이상 사용하라."],
        ["사용 방법과 차별점, 특징, 강점을 중심으로 4000 토큰 이상 유튜브 영상 스크립트 형태로 작성하라"],
        ["본 서비스를 SEO 최적화하여 블로그 포스트로 4000 토큰 이상 작성하라"],
        ["특허 출원에 활용할 기술 및 비즈니스모델 측면을 포함하여 특허 출원서 구성에 맞게 작성하라"],
        ["계속 이어서 답변하라"],
    ],
    theme="Nymbo/Nymbo_Theme",
    cache_examples=False,
)

if __name__ == "__main__":
    demo.launch()