tianlong12 committed
Commit c1d6aa2 · verified · 1 Parent(s): 520b9c6

Update app.py

Files changed (1)
  1. app.py +125 -114
app.py CHANGED
@@ -1,29 +1,23 @@
-
-import os
+import random
 import time
 import json
-import uuid
-import asyncio
-import aiohttp
-import random
 import re
+import aiohttp
+import requests
+import asyncio
 from flask import Flask, request, Response
 from flask_cors import CORS
+import uuid

 app = Flask(__name__)
-CORS(app, resources={r"/*": {"origins": "*"}})
+CORS(app)

-# Load environment variables
 MAGAI_TOKEN = {
     "cookie": "_fbp=fb.1.1722349051350.28463402121267809;soquick-mobile_u1main=1722349236461x685414888067651300;intercom-id-jnjoad6e=cbbd8fc9-a010-4e8c-8e7e-9cffccd3abea;soquick-mobile_live_u2main.sig=HuQePfrEHGidu4eRyfiZkcL1_2E;__stripe_mid=7767e1a3-e87f-4456-b073-6c8b7ae9e82119b00d;__stripe_sid=99c612a5-a12a-426f-baa5-e61471a013f140c482;_ga=GA1.1.242967908.1722349051;_ga_GFQ25YSHT2=GS1.1.1726123356.1.0.1726123393.0.0.0;_ga_N5J29RVHDJ=GS1.1.1726123395.4.1.1726124637.0.0.0;intercom-device-id-jnjoad6e=35ee824e-f7f6-415d-8698-bd822cb46d3a;intercom-session-jnjoad6e=SXlhUDdqa0E5YTlmUVA2QXk3K3hBSXFNSnRzL2x0cEtvVDF3U1k3UlRCQmhPRVdUcktDYkpwVXpiZFA1SzhUSi0tTFlCazFUamcxbExFRU1LTVlWSitVQT09--40f0dca176cfb28add2903465c54bcee4f33de2b;soquick-mobile_live_u2main=bus|1722349236461x685414888067651300|1726123417637x655253536227564700",
     "app_last_change": "21388518093",
     "current_page_item": "1348695171700984260__LOOKUP__1726124636560x692535552825360400",
     "current_user": "1348695171700984260__LOOKUP__1722349236461x685414888067651300",
 }
-MAGAI_COOKIE = MAGAI_TOKEN["cookie"]
-MAGAI_APP_LAST_CHANGE = MAGAI_TOKEN["app_last_change"]
-MAGAI_CURRENT_PAGE_ITEM = MAGAI_TOKEN["current_page_item"]
-MAGAI_CURRENT_USER = MAGAI_TOKEN["current_user"]

 MAGAI_MAPPING = {
     "gpt-4o": "openai/gpt-4o",
@@ -45,6 +39,7 @@ def create_luid(separator="x"):
 def format_model_name(model_name):
     return re.sub(r"_+", "_", re.sub(r"[/:-]", "_", model_name))

+
 def find_token_in_object(obj):
     if isinstance(obj, dict):
         for key, value in obj.items():
@@ -55,22 +50,24 @@ def find_token_in_object(obj):
                 return token
     return None

+
 def get_last_user_content(messages):
     for message in reversed(messages):
         if message["role"] == "user":
             return message["content"]
     return None

+
 async def get_token(model, message):
     server_call_id = generate_uuid()
-    created_id = MAGAI_CURRENT_PAGE_ITEM.split("__")[0]
-    user_id = MAGAI_CURRENT_USER.split("__")[2]
+    created_id = MAGAI_TOKEN["current_page_item"].split("__")[0]
+    user_id = MAGAI_TOKEN["current_user"].split("__")[2]
     model_id = "0060f9accd1dbade552f65ac646fb3da"
     item_id = "bUNih7"
     element_id = "bUNib7"

     body = {
-        "app_last_change": MAGAI_APP_LAST_CHANGE,
+        "app_last_change": MAGAI_TOKEN["app_last_change"],
         "calls": [
             {
                 "client_state": {
@@ -158,8 +155,8 @@ async def get_token(model, message):
                     "cache": {
                         f"{model_id}": format_model_name(model),
                         "true": True,
-                        "CurrentPageItem": MAGAI_CURRENT_PAGE_ITEM,
-                        "CurrentUser": MAGAI_CURRENT_USER,
+                        "CurrentPageItem": MAGAI_TOKEN["current_page_item"],
+                        "CurrentUser": MAGAI_TOKEN["current_user"],
                     },
                     "exists": {
                         f"{model_id}": True,
@@ -172,6 +169,7 @@ async def get_token(model, message):
                 "server_call_id": server_call_id,
                 "item_id": item_id,
                 "element_id": element_id,
+                "page_id": "bTekm",
                 "uid_generator": {
                     "timestamp": int(time.time() * 1000),
                     "seed": round(random.random() * UUID_LENGTH) % MODULO,
@@ -196,7 +194,7 @@ async def get_token(model, message):
                 "x-bubble-fiber-id": generate_uuid(),
                 "x-bubble-pl": create_luid(),
                 "accept": "application/json, text/javascript, */*; q=0.01",
-                "cookie": MAGAI_COOKIE,
+                "cookie": MAGAI_TOKEN["cookie"],
             },
             json=body,
         ) as response:
@@ -214,113 +212,126 @@ async def get_token(model, message):
             if token:
                 return token

+
 async def get_request_data(model, messages):
     if model not in MAGAI_MAPPING:
-        return None, "Model not available"
+        return Response(
+            json.dumps(
+                {
+                    "error": {
+                        "message": "This model is currently unavailable. Please try again later or choose another model.",
+                        "code": "model_not_exists",
+                    }
+                }
+            ),
+            status=400,
+            mimetype="application/json",
+        )

     last_user_message = get_last_user_content(messages)
-    if not last_user_message:
-        return None, "No user message found"
-
-    try:
-        token = await get_token(MAGAI_MAPPING[model], last_user_message)
-        if not token:
-            return None, "Failed to obtain token"
-
-        headers = {
-            "Content-Type": "application/json",
-            "HTTP-Referer": "https://magai.co",
-            "Origin": "https://app.magai.co",
-            "Pragma": "no-cache",
-            "Referer": "https://app.magai.co/",
-            "Token": token,
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/500.00 (KHTML, like Gecko) Chrome/100.0.0.0 Safari/500.00",
-        }
-
-        json_data = {
-            "model": MAGAI_MAPPING[model],
-            "messages": [{"role": "system", "content": "You are a helpful assistant."}] + messages,
-            "tools": [
-                {
-                    "type": "function",
-                    "function": {
-                        "name": "get_actual_time_info",
-                        "description": "Returns actual information from web about prompt theme.",
-                        "parameters": {
-                            "type": "object",
-                            "properties": {
-                                "query": {
-                                    "type": "string",
-                                    "description": "The query string based on users prompt to search information about.",
-                                }
-                            },
-                            "required": ["query"],
-                        },
-                    },
-                },
-                {
-                    "type": "function",
-                    "function": {
-                        "name": "generate_image",
-                        "description": "Returns generated image URL.",
-                        "parameters": {
-                            "type": "object",
-                            "properties": {
-                                "query": {
-                                    "type": "string",
-                                    "description": "Prompt to image generation AI model, that describes what image to generate.",
-                                }
-                            },
-                            "required": ["query"],
-                        },
-                    },
-                },
-            ],
-            "provider": {"data_collection": "deny"},
-            "tool_choice": "auto",
-            "stream": True,
-        }
-
-        async with aiohttp.ClientSession() as session:
-            async with session.post(
-                "https://live.proxy.magai.co:4430/opr/api/v1/chat/completions",
-                headers=headers,
-                json=json_data,
-            ) as response:
-                return response, None
-    except Exception as e:
-        return None, str(e)
+    token = await get_token(MAGAI_MAPPING[model], last_user_message)
+    headers = {
+        "Content-Type": "application/json",
+        "HTTP-Referer": "https://magai.co",
+        "Origin": "https://app.magai.co",
+        "Pragma": "no-cache",
+        "Referer": "https://app.magai.co/",
+        "Token": token,
+        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/500.00 (KHTML, like Gecko) Chrome/100.0.0.0 Safari/500.00",
+    }
+
+    json_data = {
+        "model": MAGAI_MAPPING[model],
+        "messages": [{"role": "system", "content": "You are a helpful assistant."}]
+        + messages,
+        "tools": [
+            {
+                "type": "function",
+                "function": {
+                    "name": "get_actual_time_info",
+                    "description": "Returns actual information from web about prompt theme.",
+                    "parameters": {
+                        "type": "object",
+                        "properties": {
+                            "query": {
+                                "type": "string",
+                                "description": "The query string based on users prompt to search information about.",
+                            }
+                        },
+                        "required": ["query"],
+                    },
+                },
+            },
+            {
+                "type": "function",
+                "function": {
+                    "name": "generate_image",
+                    "description": "Returns generated image URL.",
+                    "parameters": {
+                        "type": "object",
+                        "properties": {
+                            "query": {
+                                "type": "string",
+                                "description": "Prompt to image generation AI model, that describes what image to generate.",
+                            }
+                        },
+                        "required": ["query"],
+                    },
+                },
+            },
+        ],
+        "provider": {"data_collection": "deny"},
+        "tool_choice": "auto",
+        "stream": True,
+    }
+
+    response = requests.post(
+        "https://live.proxy.magai.co:4430/opr/api/v1/chat/completions",
+        headers=headers,
+        json=json_data,
+    )
+    return response
+
+
+def format_response(response):
+    content = ""
+    for line in response.iter_lines():
+        if line:
+            decoded_line = line.decode("utf-8")
+            if decoded_line.startswith("data:"):
+                try:
+                    data = json.loads(decoded_line[5:].strip())
+                    if "choices" in data and len(data["choices"]) > 0:
+                        delta = data["choices"][0].get("delta", {})
+                        if "content" in delta:
+                            content += delta["content"]
+                except json.JSONDecodeError:
+                    pass
+    return content
+

 @app.route("/hf/v1/chat/completions", methods=["POST"])
-async def chat_completions():
-    try:
-        data = request.json
-        messages = data.get("messages", [])
-        model = data.get("model", "claude-3.5-sonnet")
-
-        response, error = await get_request_data(model, messages)
-        if error:
-            return Response(json.dumps({"error": error}), status=400, mimetype="application/json")
-
-        async def generate():
-            async for line in response.content:
-                if line:
-                    decoded_line = line.decode("utf-8")
-                    if decoded_line.startswith("data:"):
-                        try:
-                            data = json.loads(decoded_line[5:].strip())
-                            if "choices" in data and len(data["choices"]) > 0:
-                                delta = data["choices"][0].get("delta", {})
-                                if "content" in delta:
-                                    content = delta["content"].replace("\n", "\\n")
-                                    yield f'data:{{"id":"{uuid.uuid4()}","object":"chat.completion.chunk","created":{int(time.time())},"model":"{model}","system_fingerprint":"fp_45ah8ld5a7","choices":[{{"index":0,"delta":{{"content":"{content}"}},"logprobs":null,"finish_reason":null}}]}}\n\n'
-                        except json.JSONDecodeError:
-                            pass
-            yield "data:[DONE]\n"
-
-        return Response(generate(), mimetype="text/event-stream")
-    except Exception as e:
-        return Response(json.dumps({"error": str(e)}), status=500, mimetype="application/json")
+def chat_completions():
+    data = request.json
+    messages = data.get("messages", [])
+    model = data.get("model", "claude-3.5-sonnet")
+
+    async def process_request():
+        response = await get_request_data(model, messages)
+        return format_response(response)
+
+    loop = asyncio.new_event_loop()
+    asyncio.set_event_loop(loop)
+    result = loop.run_until_complete(process_request())
+
+    event_stream_response = ""
+    for part in result:
+        part = part.replace("\n", "\\n")
+        event_stream_response += f'data:{{"id":"{uuid.uuid4()}","object":"chat.completion.chunk","created":{int(time.time())},"model":"{model}","system_fingerprint":"fp_45ah8ld5a7","choices":[{{"index":0,"delta":{{"content":"{part}"}},"logprobs":null,"finish_reason":null}}]}}\n\n'
+    event_stream_response += "data:[DONE]\n"
+
+    return Response(event_stream_response, mimetype="text/event-stream")
+

 if __name__ == "__main__":
-    app.run(host="0.0.0.0", port=7860)
+    app.run(host="0.0.0.0", port=7860)
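For reference, a minimal client sketch against the route this revision exposes (not part of the commit). It assumes the app is running locally on the `0.0.0.0:7860` bind from `__main__`; the URL, model name, and prompt below are illustrative, with the model required to be a key of `MAGAI_MAPPING`. The reply is read as the SSE-style `data:` chunks that `chat_completions()` emits, terminated by `data:[DONE]`.

```python
import json
import requests

# Assumed local address; the app binds to 0.0.0.0:7860 in __main__.
URL = "http://localhost:7860/hf/v1/chat/completions"

payload = {
    "model": "gpt-4o",  # must be one of the keys in MAGAI_MAPPING
    "messages": [{"role": "user", "content": "Say hello in one sentence."}],
}

with requests.post(URL, json=payload, stream=True) as resp:
    for raw in resp.iter_lines():
        if not raw:
            continue
        line = raw.decode("utf-8")
        if not line.startswith("data:"):
            continue
        data = line[len("data:"):].strip()
        if data == "[DONE]":
            break  # terminal sentinel emitted by the endpoint
        chunk = json.loads(data)
        delta = chunk["choices"][0].get("delta", {})
        print(delta.get("content", ""), end="", flush=True)
print()
```

Note that the handler builds the whole `event_stream_response` string before returning it, so the "stream" arrives in a single response body; the chunked `data:` framing is kept only for compatibility with clients that expect the OpenAI streaming format.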