# NOTE: the three lines that stood here ("Spaces:", "Running", "Running") were
# Hugging Face Spaces page-header residue, not Python; commented out so the
# file parses.
# -*- coding: utf-8 -*- | |
import gradio as gr | |
from huggingface_hub import InferenceClient | |
from gradio_client import Client | |
import os | |
import requests | |
import asyncio | |
import logging | |
from concurrent.futures import ThreadPoolExecutor | |
# Logging configuration: DEBUG level with timestamped, leveled messages.
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
# API configuration: HF inference client for the chat model (token read from
# the HF_TOKEN environment variable) plus the endpoint of a separately hosted
# Gradio image-generation service.
hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=os.getenv("HF_TOKEN"))
IMAGE_API_URL = "http://211.233.58.201:7896"  # NOTE(review): hard-coded external server — confirm availability
def generate_image(prompt: str) -> tuple:
    """Generate a fantasy-style image for *prompt* via the remote Gradio API.

    Args:
        prompt: free-form text used as the image prompt.

    Returns:
        (image, seed) on success, or (None, "Error: ...") on failure.
        Never raises: all errors are logged and reported in the tuple.
    """
    try:
        client = Client(IMAGE_API_URL)
        # Prefix every prompt so all generated images share the fantasy style.
        enhanced_prompt = f"fantasy style, {prompt}"
        result = client.predict(
            prompt=enhanced_prompt,
            width=768,
            height=768,
            guidance=7.5,
            inference_steps=30,
            seed=3,
            do_img2img=False,
            init_image=None,
            image2image_strength=0.8,
            resize_img=True,
            api_name="/generate_image"
        )
        # The endpoint is expected to return (image, seed, ...); guard against
        # shorter/odd payloads instead of raising IndexError and breaking the
        # documented (None, error-message) failure contract.
        if isinstance(result, (list, tuple)) and len(result) >= 2:
            return result[0], result[1]
        logging.error("Image generation returned unexpected payload: %r", result)
        return None, "Error: unexpected response from image API"
    except Exception as e:
        logging.error(f"Image generation failed: {str(e)}")
        return None, f"Error: {str(e)}"
def respond(
    message,
    history: list[tuple[str, str]],
    system_message="",
    max_tokens=7860,
    temperature=0.8,
    top_p=0.9,
):
    """Stream a chat completion, then illustrate the finished reply.

    Args:
        message: latest user message.
        history: prior (user, assistant) exchanges from the Chatbot widget.
        system_message: extra system-prompt text appended to the built-in prefix.
        max_tokens, temperature, top_p: sampling parameters for the model.

    Yields:
        (partial_text, None) while the reply streams, then one final
        (full_text, image) pair once the image has been generated.
        On failure, yields a single ("Error: ...", None) pair.
    """
    system_prefix = """
[μμ€ν ν둬ννΈ λ΄μ©...]
"""
    messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
    for user_turn, assistant_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})

    response = ""
    try:
        # Iterate under a distinct name: the original loop shadowed the
        # `message` parameter with the stream chunks.
        for chunk in hf_client.chat_completion(
            messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            token = chunk.choices[0].delta.content
            if token is not None:
                # The original `token.strip("")` stripped the empty character
                # set — a no-op — so append the token unchanged.
                response += token
            yield response, None  # no image while text is still streaming
        # Text complete: generate an image from the first 200 characters of
        # the reply, used as the image prompt.
        image, seed = generate_image(response[:200])
        yield response, image
    except Exception as e:
        yield f"Error: {str(e)}", None
# Gradio interface layout: chat column (left, scale 2) + generated image
# panel (right, scale 1). Widget construction order matters — `msg.submit`
# below references the components created here.
with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange") as interface:
    gr.Markdown("# Fantasy Novel AI Generation")
    with gr.Row():
        with gr.Column(scale=2):
            chatbot = gr.Chatbot()
            msg = gr.Textbox(label="Enter your message")
            system_msg = gr.Textbox(label="System Message", value="Write(output) in νκ΅μ΄.")
            with gr.Row():
                max_tokens = gr.Slider(minimum=1, maximum=8000, value=7000, label="Max Tokens")
                temperature = gr.Slider(minimum=0, maximum=1, value=0.7, label="Temperature")
                top_p = gr.Slider(minimum=0, maximum=1, value=0.9, label="Top P")
        with gr.Column(scale=1):
            image_output = gr.Image(label="Generated Image")
    # Clickable example prompts (Korean text preserved verbatim from source,
    # including its original mis-encoding).
    examples = gr.Examples(
        examples=[
            ["ννμ§ μμ€μ ν₯λ―Έλ‘μ΄ μμ¬ 10κ°μ§λ₯Ό μ μνλΌ"],
            ["κ³μ μ΄μ΄μ μμ±νλΌ"],
            ["Translate into English"],
            ["λ§λ² μμ€ν μ λν΄ λ μμΈν μ€λͺ νλΌ"],
            ["μ ν¬ μ₯λ©΄μ λ κ·Ήμ μΌλ‘ λ¬μ¬νλΌ"],
            ["μλ‘μ΄ ννμ§ μ’ μ‘±μ μΆκ°νλΌ"],
            ["κ³ λ μμΈμ λν΄ λ μμΈν μ€λͺ νλΌ"],
            ["μ£ΌμΈκ³΅μ λ΄λ©΄ λ¬μ¬λ₯Ό μΆκ°νλΌ"],
        ],
        inputs=msg
    )
    # Wire message submission to the streaming responder: it receives the
    # message, chat history, and sampling sliders, and updates both the
    # chatbot transcript and the image panel as it yields.
    msg.submit(
        respond,
        [msg, chatbot, system_msg, max_tokens, temperature, top_p],
        [chatbot, image_output]
    )
# Application entry point: serve on all interfaces at port 7860 and also
# request a public share link.
if __name__ == "__main__":
    interface.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=True
    )