Upload 3 files
Added Application
- Dockerfile +11 -0
- Requirements.txt +3 -0
- app.py +127 -0
Dockerfile
ADDED
@@ -0,0 +1,11 @@
+FROM python:3.9
+
+WORKDIR /Code
+
+COPY ./Requirements.txt /Code/Requirements.txt
+
+RUN pip install --no-cache-dir --upgrade -r /Code/Requirements.txt
+
+COPY . .
+
+CMD ["flask", "run", "--host", "0.0.0.0", "--port", "7860"]
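Once the image is built and a container is running with port 7860 published, a quick way to confirm the service is reachable is a plain HTTP check against the landing page. The sketch below is only an illustration and assumes the container is reachable at localhost:7860; it uses just the Python standard library.

# Minimal liveness check for the containerized app (assumes it is reachable at localhost:7860).
import urllib.request

def check_service(url="http://localhost:7860/"):
    # A 200 response carrying the HTML landing page means Flask is serving requests.
    with urllib.request.urlopen(url, timeout=10) as resp:
        print(resp.status, resp.headers.get("Content-Type"))

if __name__ == "__main__":
    check_service()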
Requirements.txt
ADDED
@@ -0,0 +1,3 @@
+uvicorn
+huggingface_hub
+flask
app.py
ADDED
@@ -0,0 +1,127 @@
+# **Durgaai Solutions (India First AI Assistant)**
+# **A Flask API for generating text using the Mixtral-8x7B-Instruct-v0.1 model**
+
+import uvicorn  # listed in Requirements.txt; not used directly, since the Flask server handles requests
+from flask import Flask, request, jsonify, render_template_string
+from huggingface_hub import InferenceClient
+import logging
+
+# Configure logging
+logging.basicConfig(level=logging.INFO)
+
+# Defining Application
+app = Flask(__name__)
+
+# Defining Model Used
+used_model = "https://api-inference.huggingface.co/models/mistralai/Mixtral-8x7B-Instruct-v0.1"
+
+# Defining Prompt
+def customize_prompt(message, final_instructions=None):
+    prompt = ""
+    if final_instructions:
+        prompt += f"[INST] {final_instructions} [/INST]"
+    prompt += f"[INST] {message} [/INST]"
+    return prompt
+
+# Main Application
+def public_model(prompt, instructions, api, temperature=0.90, max_new_tokens=256, top_p=0.95, repetition_penalty=1.2):
+    global used_model
+    try:
+        temperature = float(temperature)
+        if temperature < 1e-2:
+            temperature = 1e-2
+        top_p = float(top_p)
+
+        generate_kwargs = dict(
+            temperature=temperature,
+            max_new_tokens=max_new_tokens,
+            top_p=top_p,
+            repetition_penalty=repetition_penalty,
+            do_sample=True,
+            seed=69,
+        )
+        final_instructions = instructions
+        result = customize_prompt(prompt, final_instructions)
+
+        head = {"Authorization": f"Bearer {api}"}
+        client = InferenceClient(used_model, headers=head)
+        response = client.text_generation(result, **generate_kwargs)
+        return response
+    except Exception as e:
+        logging.error(f"Error generating text: {e}")
+        return str(e)
+
+# Running Application
+@app.route("/run-application", methods=["POST"])
+def run_application():
+    data = request.get_json(silent=True) or {}
+    prompt = data.get("prompt")
+    instructions = data.get("instructions")
+    api_key = data.get("api_key")
+
+    if not prompt or not instructions or not api_key:
+        return jsonify({"Error": "Missing Required Fields"}), 400
+
+    try:
+        # Validate API key
+        if not api_key.startswith("hf_"):
+            return jsonify({"Error": "Invalid API key"}), 401
+
+        response = public_model(prompt, instructions, api_key)
+        return jsonify({"Response": response}), 200
+    except Exception as e:
+        logging.error(f"Error processing request: {e}")
+        return jsonify({"Error": "Internal Server Error"}), 500
+
+# Basic HTML Interface
+html = '''
+<!DOCTYPE html>
+<html>
+<head>
+    <title>Mixtral 7b Instruct v0.1 - Public Server For API Usage</title>
+    <style>
+        body {
+            font-family: Arial, sans-serif;
+        }
+        .Container {
+            text-align: center;
+            max-width: 800px;
+            margin: 50px auto;
+            padding: 20px;
+            background-color: #f9f9f9;
+            border: 4px solid lawngreen;
+            box-shadow: 0 0 10px rgba(0, 0, 0, 0.2);
+            border-radius: 12px;
+        }
+        .Container h1 {
+            font-size: 40px;
+            color: orange;
+        }
+        .Container p {
+            font-size: 20px;
+            color: darkred;
+        }
+        .Container .note {
+            font-size: 24px;
+            color: darkmagenta;
+        }
+    </style>
+</head>
+<body>
+    <div class="Container">
+        <h1>Mixtral 7b Instruct v0.1 - Public Server</h1>
+        <p>Welcome To The Durgaai Solutions Organisation ( To Use This Server, Follow These Steps )<br>1. Create Your Hugging Face Access Token, Like: (hf_**********************************)<br>2. Go To Our { GitHub Organisation | Source Code } Page Via The Link Below<br>3. Download The Repository, Make The Suggested Changes To The (app.py) File &amp; It Is Ready!</p>
+        <p class="note"><a href="https://github.com/DurgaiSolutions/Mixtral-7b-Instruct-v0.1">GitHub Link</a> : Use It For Free, With Unlimited Access To This AI Model</p>
+    </div>
+</body>
+</html>
+'''
+
+# Launching Interface
+@app.route("/", methods=["GET"])
+def index():
+    return render_template_string(html)
+
+# Launch Application (local development; the Docker image starts the app via `flask run` instead)
+if __name__ == "__main__":
+    app.run(debug=True)
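For reference, a request to the /run-application endpoint defined above could look like the following sketch. It assumes the app is running locally on port 7860 and that the caller supplies a valid Hugging Face access token (starting with hf_); the prompt and instructions values are placeholders.

# Example client for the /run-application endpoint (assumes the app is reachable at localhost:7860).
import json
import urllib.request

payload = {
    "prompt": "Write a short greeting.",         # placeholder prompt
    "instructions": "Respond in one sentence.",  # placeholder instructions
    "api_key": "hf_xxx",                         # replace with a real Hugging Face access token
}

req = urllib.request.Request(
    "http://localhost:7860/run-application",
    data=json.dumps(payload).encode("utf-8"),
    headers={"Content-Type": "application/json"},
    method="POST",
)

with urllib.request.urlopen(req) as resp:
    # Expected shape on success: {"Response": "<generated text>"}
    print(json.loads(resp.read().decode("utf-8")))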