import gradio as gr
import spaces
from peft import AutoPeftModelForCausalLM
from transformers import AutoTokenizer
# Define the prompt template
alpaca_prompt = """Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.

### Instruction:
{}

### Input:
{}

### Response:
{}"""
# Fixed instruction text
instruction_text = "You are a blogger named Artemiy Lebedev, your purpose is to generate a post in Russian based on the post article"
# Function to generate responses
@spaces.GPU  # allocate GPU hardware for this call on ZeroGPU Spaces
def generate_response(input_text):
    # Load the model and tokenizer within the GPU context
    model = AutoPeftModelForCausalLM.from_pretrained(
        "shakaryan/lebedev_qwen2.5",
        load_in_4bit=True,  # Adjust based on your setup
        device_map="auto",  # 4-bit quantized models can't be moved with .to(); let accelerate place them
    )
    tokenizer = AutoTokenizer.from_pretrained("shakaryan/lebedev_qwen2.5")
    EOS_TOKEN = tokenizer.eos_token  # Ensure proper sequence termination

    # Format the prompt with the fixed instruction and the user's input
    formatted_prompt = alpaca_prompt.format(instruction_text, input_text, "")

    # Tokenize and generate the response
    inputs = tokenizer(formatted_prompt, return_tensors="pt").to("cuda")
    outputs = model.generate(**inputs, max_new_tokens=256, use_cache=True)
    response = tokenizer.decode(outputs[0], skip_special_tokens=True)

    # Extract the part after "### Response:" and strip the chat end-of-turn marker
    response_start = response.find("### Response:") + len("### Response:\n")
    response_clean = response[response_start:].replace("<|im_end|>", "").strip()
    return response_clean
# Define the Gradio interface
with gr.Blocks() as demo:
    gr.Markdown(
        """
        ### Генератор постов в стиле Артемия Лебедева
        Этот генератор создает посты в стиле Артемия Лебедева.
        Попробуйте написать заголовок поста, и генератор создаст текст.
        Подробнее о стиле: [Артемий Лебедев в Telegram](https://t.me/temalebedev) \n
        Телеграм канал автора: [Гегам Шакарян - ИИ в лаваше](https://t.me/ai_in_lavash)
        """
    )
    with gr.Row():
        input_text = gr.Textbox(
            label="Заголовок поста",
            placeholder="Введите заголовок поста здесь...",
            lines=5,
        )
    with gr.Row():
        output = gr.Textbox(label="Сгенерированный пост", lines=10)
    with gr.Row():
        generate_button = gr.Button("Сгенерировать")
    generate_button.click(
        fn=generate_response,
        inputs=[input_text],
        outputs=output,
    )
# Launch the app
demo.launch()
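# Note (assumption, not part of the original Space files): based on the imports above, a
# requirements.txt for this app would typically include gradio, spaces, transformers, peft,
# accelerate, bitsandbytes (needed for load_in_4bit), and torch. Exact versions depend on
# the Space hardware and the base model's requirements.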