Upload index.js
Browse files
index.js
ADDED
@@ -0,0 +1,327 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { createServerAdapter } from '@whatwg-node/server'
|
2 |
+
import { AutoRouter, json, error, cors } from 'itty-router'
|
3 |
+
import { createServer } from 'http'
|
4 |
+
import dotenv from 'dotenv'
|
5 |
+
|
6 |
+
dotenv.config()
|
7 |
+
|
8 |
+
class Config {
  /**
   * Runtime configuration, sourced from environment variables with defaults.
   * NOTE: every `process.env` value is a string, so the numeric settings are
   * parsed explicitly (previously MAX_RETRY_COUNT / RETRY_DELAY stayed strings,
   * and a FAKE_HEADERS env value would have been spread as a raw string).
   */
  constructor() {
    this.API_PREFIX = process.env.API_PREFIX || '/'
    this.API_KEY = process.env.API_KEY || ''

    // Parse integers; fall back to defaults when unset or not a number.
    const retries = Number.parseInt(process.env.MAX_RETRY_COUNT ?? '', 10)
    this.MAX_RETRY_COUNT = Number.isNaN(retries) ? 3 : retries
    const delay = Number.parseInt(process.env.RETRY_DELAY ?? '', 10)
    this.RETRY_DELAY = Number.isNaN(delay) ? 5000 : delay

    // FAKE_HEADERS, when provided via the environment, must be a JSON object
    // string. On malformed JSON we warn and keep the built-in defaults rather
    // than crashing at startup.
    let fakeHeaders
    if (process.env.FAKE_HEADERS) {
      try {
        fakeHeaders = JSON.parse(process.env.FAKE_HEADERS)
      } catch (err) {
        console.warn('Invalid FAKE_HEADERS env JSON, using defaults:', err.message)
      }
    }
    this.FAKE_HEADERS = fakeHeaders ?? {
      Accept: '*/*',
      'Accept-Encoding': 'gzip, deflate, br, zstd',
      'Accept-Language': 'zh-CN,zh;q=0.9',
      Origin: 'https://duckduckgo.com/',
      Cookie: 'l=wt-wt; ah=wt-wt; dcm=6',
      Dnt: '1',
      Priority: 'u=1, i',
      Referer: 'https://duckduckgo.com/',
      'Sec-Ch-Ua': '"Microsoft Edge";v="129", "Not(A:Brand";v="8", "Chromium";v="129"',
      'Sec-Ch-Ua-Mobile': '?0',
      'Sec-Ch-Ua-Platform': '"Windows"',
      'Sec-Fetch-Dest': 'empty',
      'Sec-Fetch-Mode': 'cors',
      'Sec-Fetch-Site': 'same-origin',
      'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36',
    }
  }
}
|
33 |
+
|
34 |
+
// Shared configuration instance used by the middleware and handlers below.
const config = new Config()

// CORS helpers from itty-router: `preflight` answers OPTIONS requests,
// `corsify` decorates outgoing responses with the CORS headers.
const { preflight, corsify } = cors({ origin: '*', allowMethods: '*', exposeHeaders: '*' })
|
41 |
+
|
42 |
+
// Middleware: stamp the request's arrival time (ms epoch) so `logger`
// can report the elapsed handling time later.
const withBenchmarking = (request) => {
  const startedAt = Date.now()
  request.start = startedAt
}
|
45 |
+
|
46 |
+
// Middleware: enforce Bearer-token auth when an API key is configured.
// Returns an error response to short-circuit the router, or undefined to pass.
const withAuth = (request) => {
  if (!config.API_KEY) return // auth disabled when no key configured

  const authHeader = request.headers.get('Authorization')
  if (!authHeader || !authHeader.startsWith('Bearer ')) {
    return error(401, 'Unauthorized: Missing or invalid Authorization header')
  }

  const token = authHeader.substring(7)
  if (token !== config.API_KEY) {
    return error(403, 'Forbidden: Invalid API key')
  }
}
|
58 |
+
|
59 |
+
// Final handler: log "<METHOD> <status> <url> <elapsed> ms" for every request,
// using the timestamp stamped by `withBenchmarking`.
const logger = (res, req) => {
  const elapsedMs = Date.now() - req.start
  console.log(req.method, res.status, req.url, elapsedMs, 'ms')
}
|
62 |
+
|
63 |
+
// Router wiring: timing middleware first, then CORS preflight, then auth.
// Unmatched routes get a 404; every response passes through corsify + logger.
const router = AutoRouter({
  before: [withBenchmarking, preflight, withAuth],
  missing: () => error(404, '404 not found.'),
  finally: [corsify, logger],
})
|
68 |
+
|
69 |
+
// Health / info endpoints.
router.get('/', () => json({ message: 'API 服务运行中~' }))
router.get('/ping', () => json({ message: 'pong' }))

// OpenAI-compatible model listing. All models are exposed as owned by 'ddg'.
// NOTE(review): with the default API_PREFIX of '/', this registers '//v1/models'
// and '//v1/chat/completions' — verify this matches the intended deployment.
const SUPPORTED_MODEL_IDS = ['gpt-4o-mini', 'claude-3-haiku', 'llama-3.1-70b', 'mixtral-8x7b']
router.get(config.API_PREFIX + '/v1/models', () =>
  json({
    object: 'list',
    data: SUPPORTED_MODEL_IDS.map((id) => ({ id, object: 'model', owned_by: 'ddg' })),
  })
)

// OpenAI-compatible chat completion endpoint.
router.post(config.API_PREFIX + '/v1/chat/completions', (req) => handleCompletion(req))
|
84 |
+
|
85 |
+
/**
 * Handle an OpenAI-style chat completion request.
 * Parses { model, messages, stream } from the JSON body, maps the requested
 * model name to DuckDuckGo's identifier, flattens the messages into a single
 * prompt string, and delegates to createCompletion.
 *
 * @param {Request} request - incoming HTTP request with a JSON body
 * @returns {Promise<Response>} streaming or JSON completion, or a 500 error
 */
async function handleCompletion(request) {
  try {
    const { model: inputModel, messages, stream: returnStream } = await request.json()
    const model = convertModel(inputModel)
    const content = messagesPrepare(messages)
    // Bug fix: `await` so rejections from createCompletion (e.g. exhausted
    // retries) are caught here instead of escaping the try/catch.
    return await createCompletion(model, content, returnStream)
  } catch (err) {
    // Bug fix: the error response was built but never returned, so failures
    // previously resolved to `undefined` instead of an HTTP 500.
    return error(500, err.message)
  }
}
|
95 |
+
|
96 |
+
/**
 * Send one chat request to DuckDuckGo's duckchat endpoint, retrying up to
 * config.MAX_RETRY_COUNT times (with config.RETRY_DELAY ms between attempts)
 * on fetch failures or non-2xx responses. A fresh x-vqd-4 token is requested
 * before every attempt.
 *
 * @param {string} model - DuckDuckGo model identifier
 * @param {string} content - flattened prompt text
 * @param {boolean} returnStream - whether the caller wants an SSE stream
 * @param {number} [retryCount=0] - attempts already consumed
 * @returns {Promise<Response>} response produced by handlerStream
 * @throws the last error once retries are exhausted
 */
async function createCompletion(model, content, returnStream, retryCount = 0) {
  while (true) {
    // Token fetch is outside the try on purpose: a token failure propagates
    // immediately instead of being retried (matches the original behavior).
    const token = await requestToken()
    try {
      const response = await fetch(`https://duckduckgo.com/duckchat/v1/chat`, {
        method: 'POST',
        headers: {
          ...config.FAKE_HEADERS,
          Accept: 'text/event-stream',
          'Content-Type': 'application/json',
          'x-vqd-4': token,
        },
        body: JSON.stringify({
          model: model,
          messages: [
            {
              role: 'user',
              content: content,
            },
          ],
        }),
      })

      if (!response.ok) {
        throw new Error(`HTTP error! status: ${response.status}`)
      }
      return handlerStream(model, response.body, returnStream)
    } catch (err) {
      console.log(err)
      if (retryCount >= config.MAX_RETRY_COUNT) {
        throw err
      }
      retryCount += 1
      console.log('Retrying... count', retryCount)
      await new Promise((resolve) => setTimeout(resolve, config.RETRY_DELAY))
    }
  }
}
|
132 |
+
|
133 |
+
/**
 * Convert DuckDuckGo's SSE chat stream into an OpenAI-style response.
 * In stream mode each message becomes a `chat.completion.chunk` SSE event
 * followed by a stop chunk; otherwise the full accumulated text is emitted
 * once as a single `chat.completion` JSON body.
 *
 * @param {string} model - model name reported in the final/stop payloads
 * @param {ReadableStream} rb - upstream response body
 * @param {boolean} returnStream - SSE pass-through vs. buffered JSON
 * @returns {Response} response wrapping the transformed stream
 */
async function handlerStream(model, rb, returnStream) {
  let bwzChunk = ''
  let previousText = ''

  // Re-assemble network chunks split mid-JSON: a chunk that does not end with
  // `"}` is buffered in bwzChunk and prepended to the next one.
  const handChunkData = (chunk) => {
    chunk = chunk.trim()
    if (bwzChunk != '') {
      chunk = bwzChunk + chunk
      bwzChunk = ''
    }
    if (chunk.includes('[DONE]')) {
      return chunk
    }
    if (chunk.slice(-2) !== '"}') {
      bwzChunk = chunk
    }
    return chunk
  }

  const reader = rb.getReader()
  const decoder = new TextDecoder()
  const encoder = new TextEncoder()
  const stream = new ReadableStream({
    async start(controller) {
      try {
        let finished = false
        while (!finished) {
          const { done, value } = await reader.read()
          if (done) break
          // {stream: true} keeps multi-byte UTF-8 sequences that straddle
          // chunk boundaries intact (previously they decoded as U+FFFD).
          const chunkStr = handChunkData(decoder.decode(value, { stream: true }))
          if (bwzChunk !== '') {
            continue // waiting for the remainder of a partial JSON line
          }

          for (const rawLine of chunkStr.split('\n')) {
            if (rawLine.length < 6) continue
            const line = rawLine.slice(6) // strip the SSE "data: " prefix
            if (line === '[DONE]') {
              if (returnStream) {
                controller.enqueue(encoder.encode(`data: ${JSON.stringify(newStopChunkWithModel('stop', model))}\n\n`))
              } else {
                controller.enqueue(encoder.encode(JSON.stringify(newChatCompletionWithModel(previousText, model))))
              }
              // Bug fix: the old forEach-based version "returned" here but only
              // exited the callback — the outer loop kept reading and could
              // call controller.close() a second time.
              finished = true
              break
            }

            const originReq = JSON.parse(line)
            if (originReq.action !== 'success') {
              // Bug fix: previously controller.error() was called but the loop
              // kept processing lines; throwing stops cleanly via the catch.
              throw new Error('Error: originReq stream chunk is not success')
            }
            if (originReq.message) {
              previousText += originReq.message
              if (returnStream) {
                controller.enqueue(
                  encoder.encode(`data: ${JSON.stringify(newChatCompletionChunkWithModel(originReq.message, originReq.model))}\n\n`)
                )
              }
            }
          }
        }
        controller.close()
      } catch (err) {
        // Bug fix: JSON.parse failures previously escaped the forEach callback
        // as unhandled rejections; route every failure to the consumer.
        controller.error(err)
      } finally {
        reader.releaseLock()
      }
    },
  })

  return new Response(stream, {
    headers: {
      'Content-Type': returnStream ? 'text/event-stream' : 'application/json',
    },
  })
}
|
207 |
+
|
208 |
+
/**
 * Flatten an OpenAI-style message list into a single prompt string.
 * 'system' messages are treated as 'user'; any other role outside
 * user/assistant is dropped. Array-valued content (multimodal parts) is
 * reduced to the concatenation of its text parts.
 *
 * @param {Array<{role: string, content: string|Array}>} messages
 * @returns {string} lines of the form "role:content;\r\n"
 */
function messagesPrepare(messages) {
  return messages
    .map(({ role, content }) => {
      const effectiveRole = role === 'system' ? 'user' : role
      if (effectiveRole !== 'user' && effectiveRole !== 'assistant') {
        return null
      }
      const text = Array.isArray(content)
        ? content.filter((part) => part.text).map((part) => part.text).join('')
        : content
      return `${effectiveRole}:${text};\r\n`
    })
    .filter((entry) => entry !== null)
    .join('')
}
|
225 |
+
|
226 |
+
/**
 * Fetch a fresh `x-vqd-4` session token from DuckDuckGo's status endpoint.
 * The token is required by the chat endpoint (see createCompletion).
 *
 * @returns {Promise<string|null>} the token header value (null if absent)
 * @throws {Error} when the status endpoint responds with a non-2xx code
 */
async function requestToken() {
  const response = await fetch(`https://duckduckgo.com/duckchat/v1/status`, {
    method: 'GET',
    headers: { ...config.FAKE_HEADERS, 'x-vqd-accept': '1' },
  })
  if (!response.ok) {
    throw new Error(`HTTP error! status: ${response.status}`)
  }
  return response.headers.get('x-vqd-4')
}
|
242 |
+
|
243 |
+
// Mapping from the short, public-facing model ids to DuckDuckGo's identifiers.
// A Map (not a plain object) avoids prototype-key lookups like 'constructor'.
const MODEL_ALIASES = new Map([
  ['claude-3-haiku', 'claude-3-haiku-20240307'],
  ['llama-3.1-70b', 'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo'],
  ['mixtral-8x7b', 'mistralai/Mixtral-8x7B-Instruct-v0.1'],
])

/**
 * Resolve a requested model name (case-insensitive) to DuckDuckGo's id.
 * Unknown, missing, or non-string input falls back to 'gpt-4o-mini'.
 * Bug fix: previously a missing/non-string `model` field threw a TypeError
 * on `.toLowerCase()` instead of using the default model.
 *
 * @param {string|undefined} inputModel - model requested by the client
 * @returns {string} DuckDuckGo model identifier
 */
function convertModel(inputModel) {
  const key = typeof inputModel === 'string' ? inputModel.toLowerCase() : ''
  return MODEL_ALIASES.get(key) ?? 'gpt-4o-mini'
}
|
258 |
+
|
259 |
+
/**
 * Build an OpenAI-style streaming chunk (`chat.completion.chunk`) carrying
 * one delta of assistant text. `id` and `created` are fixed placeholders.
 *
 * @param {string} text - incremental content for this chunk
 * @param {string} model - model name to report
 * @returns {object} OpenAI-compatible chunk payload
 */
function newChatCompletionChunkWithModel(text, model) {
  const choice = {
    index: 0,
    delta: { content: text },
    finish_reason: null,
  }
  return {
    id: 'chatcmpl-QXlha2FBbmROaXhpZUFyZUF3ZXNvbWUK',
    object: 'chat.completion.chunk',
    created: 0,
    model,
    choices: [choice],
  }
}
|
276 |
+
|
277 |
+
/**
 * Build the terminal OpenAI-style streaming chunk that carries only a
 * finish_reason (normally 'stop') and no delta content.
 *
 * @param {string} reason - finish reason to report
 * @param {string} model - model name to report
 * @returns {object} OpenAI-compatible terminal chunk payload
 */
function newStopChunkWithModel(reason, model) {
  const finalChoice = { index: 0, finish_reason: reason }
  return {
    id: 'chatcmpl-QXlha2FBbmROaXhpZUFyZUF3ZXNvbWUK',
    object: 'chat.completion.chunk',
    created: 0,
    model,
    choices: [finalChoice],
  }
}
|
291 |
+
|
292 |
+
/**
 * Build a complete (non-streaming) OpenAI-style `chat.completion` payload
 * wrapping the full assistant reply. Token usage is not tracked and is
 * reported as zeros.
 *
 * @param {string} text - full assistant reply
 * @param {string} model - model name to report
 * @returns {object} OpenAI-compatible completion payload
 */
function newChatCompletionWithModel(text, model) {
  const assistantMessage = { content: text, role: 'assistant' }
  return {
    id: 'chatcmpl-QXlha2FBbmROaXhpZUFyZUF3ZXNvbWUK',
    object: 'chat.completion',
    created: 0,
    model,
    usage: { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 },
    choices: [{ message: assistantMessage, index: 0 }],
  }
}
|
314 |
+
|
315 |
+
// Serverless Service
|
316 |
+
|
317 |
+
;(async () => {
  // On Cloudflare Workers a global addEventListener exists and the Worker
  // runtime drives `router.fetch` itself — skip starting a Node HTTP server.
  if (typeof addEventListener === 'function') return

  // Node.js: adapt the WHATWG-style router to Node's http server.
  // Bug fix: the log line advertised `process.env.PORT || 8787` while
  // listen() was hard-coded to 8787 — both now use the same resolved port,
  // and the log fires only once the server is actually listening.
  const port = Number.parseInt(process.env.PORT ?? '8787', 10)
  const ittyServer = createServerAdapter(router.fetch)
  const httpServer = createServer(ittyServer)
  httpServer.listen(port, () => {
    console.log(`Listening on http://localhost:${port}`)
  })
})()
|
326 |
+
|
327 |
+
// export default router
|