Bahaedev committed
Commit 0df8e2c · verified · 1 Parent(s): 7163540

Create app.py

Files changed (1)
  1. app.py +76 -0
app.py ADDED
@@ -0,0 +1,76 @@
+ import os
+ from transformers import pipeline
+ import gradio as gr
+ from fastapi import FastAPI
+ from pydantic import BaseModel
+ import threading
+ import uvicorn
+
+ # =======================
+ # Load Secrets
+ # =======================
+ # SYSTEM_PROMPT (with the flag) must be added in HF Space secrets
+ SYSTEM_PROMPT = os.environ.get(
+     "prompt",
+     "You are a placeholder Sovereign. No secrets found in environment."
+ )
+
+ # =======================
+ # Initialize Falcon-3B
+ # =======================
+ pipe = pipeline(
+     "text-generation",
+     model="tiiuae/Falcon3-3B-Instruct",
+     torch_dtype="auto",
+     device_map="auto",
+ )
+
+ # =======================
+ # Core Chat Function
+ # =======================
+ def chat_fn(user_input: str) -> str:
+     """
+     Concatenate the system and user messages, run the model,
+     and strip the prompt from the generated output.
+     """
+     messages = [
+         {"role": "system", "content": SYSTEM_PROMPT},
+         {"role": "user", "content": user_input}
+     ]
+     # Falcon is not chat-native; we just join roles with newlines
+     prompt_text = "\n".join(f"{m['role'].capitalize()}: {m['content']}" for m in messages)
+     result = pipe(prompt_text, max_new_tokens=256, do_sample=False)
+     generated_text = result[0]["generated_text"]
+     # The pipeline returns prompt + completion, so drop the prompt prefix
+     return generated_text[len(prompt_text):].strip()
+
+ # =======================
+ # Gradio UI
+ # =======================
+ def gradio_chat(user_input: str) -> str:
+     return chat_fn(user_input)
+
+ iface = gr.Interface(
+     fn=gradio_chat,
+     inputs=gr.Textbox(lines=5, placeholder="Enter your prompt…"),
+     outputs="text",
+     title="Prompt cracking challenge",
+     description="Does he really think he is the king?"
+ )
+
+ # =======================
+ # FastAPI for API access
+ # =======================
+ app = FastAPI(title="Prompt cracking challenge API")
+
+ class Request(BaseModel):
+     prompt: str
+
+ @app.post("/generate")
+ def generate(req: Request):
+     return {"response": chat_fn(req.prompt)}
+
+ # =======================
+ # Launch Both Servers
+ # =======================
+ if __name__ == "__main__":
+     # Serve the FastAPI app in a background thread (port chosen arbitrarily),
+     # then launch the Gradio UI in the main thread.
+     threading.Thread(
+         target=uvicorn.run, args=(app,), kwargs={"host": "0.0.0.0", "port": 8000}, daemon=True
+     ).start()
+     iface.launch(server_name="0.0.0.0", share=True)
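
For reference, a minimal client-side check of the /generate endpoint could look like the sketch below. The base URL is an assumption (a local run whose FastAPI server listens on port 8000, matching the uvicorn call above), not a value taken from the Space itself; point it at the actual deployment URL instead.

# Hypothetical client check; URL and prompt text are illustrative assumptions.
import requests

resp = requests.post(
    "http://localhost:8000/generate",
    json={"prompt": "Who are you?"},
    timeout=120,
)
print(resp.json()["response"])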