Spaces: Runtime error
Update app.py
app.py CHANGED
@@ -51,7 +51,7 @@ pipe_edit.to("cuda")
 
 def promptifier(prompt):
     client1 = InferenceClient("mistralai/Mistral-7B-Instruct-v0.3")
-    system_instructions1 = "<s>[SYSTEM] Your task is to modify prompt by USER to more better prompt for Image Generation in Stable Diffusion XL, you have to optiomize prompt and also add some keywords like, 4k, realistic, featuristic according to prompt
+    system_instructions1 = "<s>[SYSTEM] Your task is to modify prompt by USER to more better prompt for Image Generation in Stable Diffusion XL, you have to optiomize prompt and also add some keywords like, 4k, realistic, featuristic according to prompt. Your task is to reply with final optimized prompt only. Just reply with optimized prompt only.[USER]"
     formatted_prompt = f"{system_instructions1} {prompt} [PROMPT]"
     stream = client1.text_generation(formatted_prompt, max_new_tokens=80, stream=True, details=True, return_full_text=False)
     return "".join([response.token.text for response in stream if response.token.text != "</s>"])
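
For context, a minimal standalone sketch of the patched promptifier is shown below. The import and the __main__ usage block are assumptions (the rest of app.py, including the pipe_edit SDXL pipeline named in the hunk header, is not shown in this diff); the system-instruction string is quoted verbatim from the commit, and the text_generation call mirrors the parameters used above.

# Sketch of the patched promptifier in isolation, assuming huggingface_hub
# is installed and an HF inference token is available in the environment
# (token handling is not part of this hunk).
from huggingface_hub import InferenceClient

def promptifier(prompt: str) -> str:
    client1 = InferenceClient("mistralai/Mistral-7B-Instruct-v0.3")
    # System instructions as committed in this change; the closing [USER]
    # marker is what the commit adds so the user prompt follows it.
    system_instructions1 = (
        "<s>[SYSTEM] Your task is to modify prompt by USER to more better prompt "
        "for Image Generation in Stable Diffusion XL, you have to optiomize prompt "
        "and also add some keywords like, 4k, realistic, featuristic according to "
        "prompt. Your task is to reply with final optimized prompt only. "
        "Just reply with optimized prompt only.[USER]"
    )
    formatted_prompt = f"{system_instructions1} {prompt} [PROMPT]"
    # Stream tokens from the endpoint and drop the end-of-sequence token.
    stream = client1.text_generation(
        formatted_prompt,
        max_new_tokens=80,
        stream=True,
        details=True,
        return_full_text=False,
    )
    return "".join(r.token.text for r in stream if r.token.text != "</s>")

if __name__ == "__main__":
    # Hypothetical usage: expand a short user prompt before handing it to the
    # image-generation pipeline defined elsewhere in app.py.
    print(promptifier("a cat sitting on a windowsill"))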