hadadrjt committed on
Commit bc87248 · 1 Parent(s): aa0dadc

SearchGPT: Production.

Signed-off-by: Hadad <[email protected]>

config.py CHANGED
@@ -11,6 +11,12 @@ SEARXNG_ENDPOINT = "https://searx.stream/search" # See the endpoint list at htt
 
  READER_ENDPOINT = "https://r.jina.ai/"
 
+ USER_AGENT = (
+     "Mozilla/5.0 (X11; Ubuntu; Linux x86_64) AppleWebKit/537.36 "
+     "(KHTML, like Gecko) Chromium/116.0.5845.97 Safari/537.36 "
+     "SearchGPT/1.0"
+ )
+
  REQUEST_TIMEOUT = 300 # 5 minute
 
  MODEL = "gpt-4.1-nano"
@@ -186,6 +192,7 @@ ITERATION_METRICS = {
          0.06,
          0.07
      ],
+     "tools_reasoning_parsing": 1,
      "backoff_multiplier": 0.2
  }
 
@@ -244,197 +251,6 @@ ENABLE_TRUST_ENV = True # AIOHTTP
 
  ENABLE_CONNECTOR_OWNER = True # AIOHTTP
 
- OS = [
-     "Windows NT 10.0; Win64; x64",
-     "Macintosh; Intel Mac OS X 10_15_7",
-     "X11; Linux x86_64",
-     "Windows NT 11.0; Win64; x64",
-     "Macintosh; Intel Mac OS X 11_6_2"
- ]
-
- OCTETS = [
-     1, 2, 3, 4, 5, 8, 12, 13, 14, 15,
-     16, 17, 18, 19, 20, 23, 24, 34, 35, 36,
-     37, 38, 39, 40, 41, 42, 43, 44, 45, 46,
-     47, 48, 49, 50, 51, 52, 53, 54, 55, 56,
-     57, 58, 59, 60, 61, 62, 63, 64, 65, 66,
-     67, 68, 69, 70, 71, 72, 73, 74, 75, 76,
-     77, 78, 79, 80, 81, 82, 83, 84, 85, 86,
-     87, 88, 89, 90, 91, 92, 93, 94, 95, 96,
-     97, 98, 99, 100, 101, 102, 103, 104, 105, 106,
-     107, 108, 109, 110, 111, 112, 113, 114, 115, 116,
-     117, 118, 119, 120, 121, 122, 123, 124, 125, 126,
-     128, 129, 130, 131, 132, 133, 134, 135, 136, 137,
-     138, 139, 140, 141, 142, 143, 144, 145, 146, 147,
-     148, 149, 150, 151, 152, 153, 154, 155, 156, 157,
-     158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
-     168, 170, 171, 172, 173, 174, 175, 176, 177, 178,
-     179, 180, 181, 182, 183, 184, 185, 186, 187, 188,
-     189, 190, 191, 192, 193, 194, 195, 196, 197, 198,
-     199, 200, 201, 202, 203, 204, 205, 206, 207, 208,
-     209, 210, 211, 212, 213, 214, 215, 216, 217, 218,
-     219, 220, 221, 222, 223
- ]
-
- BROWSERS = [
-     "Chrome",
-     "Firefox",
-     "Safari",
-     "Edge",
-     "Opera"
- ]
-
- CHROME_VERSIONS = [
-     "120.0.0.0",
-     "119.0.0.0",
-     "118.0.0.0",
-     "117.0.0.0",
-     "116.0.0.0"
- ]
-
- FIREFOX_VERSIONS = [
-     "121.0",
-     "120.0",
-     "119.0",
-     "118.0",
-     "117.0"
- ]
-
- SAFARI_VERSIONS = [
-     "17.1",
-     "17.0",
-     "16.6",
-     "16.5",
-     "16.4",
- ]
-
- EDGE_VERSIONS = [
-     "120.0.2210.91",
-     "119.0.2151.97",
-     "118.0.2088.76",
-     "117.0.2045.60",
-     "116.0.1938.81"
- ]
-
- DOMAINS = [
-     "google.com",
-     "bing.com",
-     "yahoo.com",
-     "duckduckgo.com",
-     "baidu.com",
-     "yandex.com",
-     "facebook.com",
-     "twitter.com",
-     "linkedin.com",
-     "reddit.com",
-     "youtube.com",
-     "wikipedia.org",
-     "amazon.com",
-     "github.com",
-     "stackoverflow.com",
-     "medium.com",
-     "quora.com",
-     "pinterest.com",
-     "instagram.com",
-     "tumblr.com"
- ]
-
- PROTOCOLS = [
-     "https://",
-     "https://www."
- ]
-
- SEARCH_ENGINES = [
-     "https://www.google.com/search?q=",
-     "https://www.bing.com/search?q=",
-     "https://search.yahoo.com/search?p=",
-     "https://duckduckgo.com/?q=",
-     "https://www.baidu.com/s?wd=",
-     "https://yandex.com/search/?text=",
-     "https://www.google.co.uk/search?q=",
-     "https://www.google.ca/search?q=",
-     "https://www.google.com.au/search?q=",
-     "https://www.google.de/search?q=",
-     "https://www.google.fr/search?q=",
-     "https://www.google.co.jp/search?q=",
-     "https://www.google.com.br/search?q=",
-     "https://www.google.co.in/search?q=",
-     "https://www.google.ru/search?q=",
-     "https://www.google.it/search?q="
- ]
-
- KEYWORDS = [
-     "news",
-     "weather",
-     "sports",
-     "technology",
-     "science",
-     "health",
-     "finance",
-     "entertainment",
-     "travel",
-     "food",
-     "education",
-     "business",
-     "politics",
-     "culture",
-     "history",
-     "music",
-     "movies",
-     "games",
-     "books",
-     "art"
- ]
-
- COUNTRIES = [
-     "US", "GB", "CA", "AU", "DE", "FR", "JP", "BR", "IN", "RU",
-     "IT", "ES", "MX", "NL", "SE", "NO", "DK", "FI", "PL", "TR",
-     "KR", "SG", "HK", "TW", "TH", "ID", "MY", "PH", "VN", "AR",
-     "CL", "CO", "PE", "VE", "EG", "ZA", "NG", "KE", "MA", "DZ",
-     "TN", "IL", "AE", "SA", "QA", "KW", "BH", "OM", "JO", "LB"
- ]
-
- LANGUAGES = [
-     "en-US", "en-GB", "en-CA", "en-AU", "de-DE", "fr-FR", "ja-JP",
-     "pt-BR", "hi-IN", "ru-RU", "it-IT", "es-ES", "es-MX", "nl-NL",
-     "sv-SE", "no-NO", "da-DK", "fi-FI", "pl-PL", "tr-TR", "ko-KR",
-     "zh-CN", "zh-TW", "th-TH", "id-ID", "ms-MY", "fil-PH", "vi-VN",
-     "es-AR", "es-CL", "es-CO", "es-PE", "es-VE", "ar-EG", "en-ZA",
-     "en-NG", "sw-KE", "ar-MA", "ar-DZ", "ar-TN", "he-IL", "ar-AE",
-     "ar-SA", "ar-QA", "ar-KW", "ar-BH", "ar-OM", "ar-JO", "ar-LB"
- ]
-
- TIMEZONES = [
-     "America/New_York",
-     "America/Chicago",
-     "America/Los_Angeles",
-     "America/Denver",
-     "Europe/London",
-     "Europe/Paris",
-     "Europe/Berlin",
-     "Europe/Moscow",
-     "Asia/Tokyo",
-     "Asia/Shanghai",
-     "Asia/Hong_Kong",
-     "Asia/Singapore",
-     "Asia/Seoul",
-     "Asia/Mumbai",
-     "Asia/Dubai",
-     "Australia/Sydney",
-     "Australia/Melbourne",
-     "America/Toronto",
-     "America/Vancouver",
-     "America/Mexico_City",
-     "America/Sao_Paulo",
-     "America/Buenos_Aires",
-     "Africa/Cairo",
-     "Africa/Johannesburg",
-     "Africa/Lagos",
-     "Africa/Nairobi",
-     "Pacific/Auckland",
-     "Pacific/Honolulu"
- ]
-
  DESCRIPTION = (
      "<b>SearchGPT</b> is <b>ChatGPT</b> with real-time web search capabilities "
      "and the ability to read content directly from a URL.<br><br>"
 
src/core/__init__.py CHANGED
@@ -3,10 +3,8 @@
  # SPDX-License-Identifier: Apache-2.0
  #
 
- from .web_loader import WebLoader
  from .web_configuration import WebConfiguration
 
  __all__ = [
-     'WebLoader',
      'WebConfiguration'
  ]
 
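After this change the package only re-exports WebConfiguration; importing WebLoader from src.core fails because the module is deleted in the next file. A minimal illustration, assuming src is importable as a package:

from src.core import WebConfiguration  # still exported
# from src.core import WebLoader  # raises ImportError after this commit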
src/core/web_loader.py DELETED
@@ -1,160 +0,0 @@
- #
- # SPDX-FileCopyrightText: Hadad <[email protected]>
- # SPDX-License-Identifier: Apache-2.0
- #
-
- import random
- import threading
- from collections import deque
- from config import (
-     OS,
-     OCTETS,
-     BROWSERS,
-     CHROME_VERSIONS,
-     FIREFOX_VERSIONS,
-     SAFARI_VERSIONS,
-     EDGE_VERSIONS,
-     DOMAINS,
-     PROTOCOLS,
-     SEARCH_ENGINES,
-     KEYWORDS,
-     COUNTRIES,
-     LANGUAGES,
-     TIMEZONES
- )
-
- class WebLoader:
-     def __init__(self):
-         self.ipv4_pool = deque(maxlen=1000)
-         self.ipv6_pool = deque(maxlen=1000)
-         self.user_agent_pool = deque(maxlen=500)
-         self.origin_pool = deque(maxlen=500)
-         self.referrer_pool = deque(maxlen=500)
-         self.location_pool = deque(maxlen=500)
-         self.lock = threading.Lock()
-         self.running = True
-
-     def generate_ipv4(self):
-         while len(self.ipv4_pool) < 1000 and self.running:
-             with self.lock:
-                 self.ipv4_pool.append(
-                     f"{random.choice(OCTETS)}.{random.randint(0, 255)}."
-                     f"{random.randint(0, 255)}.{random.randint(1, 254)}"
-                 )
-
-     def generate_ipv6(self):
-         while len(self.ipv6_pool) < 1000 and self.running:
-             with self.lock:
-                 self.ipv6_pool.append(
-                     ":".join([f"{random.randint(0, 65535):04x}" for _ in range(8)])
-                 )
-
-     def generate_user_agents(self):
-         while len(self.user_agent_pool) < 500 and self.running:
-             with self.lock:
-                 match random.choice(BROWSERS):
-                     case "Chrome":
-                         self.user_agent_pool.append(
-                             f"Mozilla/5.0 ({random.choice(OS)}) AppleWebKit/537.36 "
-                             f"(KHTML, like Gecko) Chrome/{random.choice(CHROME_VERSIONS)} Safari/537.36"
-                         )
-                     case "Firefox":
-                         self.user_agent_pool.append(
-                             f"Mozilla/5.0 ({random.choice(OS)}) Gecko/20100101 "
-                             f"Firefox/{random.choice(FIREFOX_VERSIONS)}"
-                         )
-                     case "Safari":
-                         self.user_agent_pool.append(
-                             f"Mozilla/5.0 ({random.choice(OS)}) AppleWebKit/"
-                             f"{600 + random.randint(0, 15)}.{random.randint(1, 9)}.{random.randint(1, 20)} "
-                             f"(KHTML, like Gecko) Version/{random.choice(SAFARI_VERSIONS)} "
-                             f"Safari/{600 + random.randint(0, 15)}.{random.randint(1, 9)}.{random.randint(1, 20)}"
-                         )
-                     case "Edge":
-                         self.user_agent_pool.append(
-                             f"Mozilla/5.0 ({random.choice(OS)}) AppleWebKit/537.36 "
-                             f"(KHTML, like Gecko) Chrome/{random.choice(EDGE_VERSIONS).split('.')[0]}.0.0.0 "
-                             f"Safari/537.36 Edg/{random.choice(EDGE_VERSIONS)}"
-                         )
-                     case _:
-                         self.user_agent_pool.append(
-                             f"Mozilla/5.0 ({random.choice(OS)}) AppleWebKit/537.36 "
-                             f"(KHTML, like Gecko) Chrome/{random.randint(70, 100)}.0."
-                             f"{random.randint(3000, 5000)}.{random.randint(50, 150)} "
-                             f"Safari/537.36 OPR/{random.randint(80, 106)}.0.0.0"
-                         )
-
-     def generate_origins(self):
-         while len(self.origin_pool) < 500 and self.running:
-             with self.lock:
-                 self.origin_pool.append(
-                     f"{random.choice(PROTOCOLS)}{random.choice(DOMAINS)}"
-                 )
-
-     def generate_referrers(self):
-         while len(self.referrer_pool) < 500 and self.running:
-             with self.lock:
-                 self.referrer_pool.append(
-                     f"{random.choice(SEARCH_ENGINES)}{random.choice(KEYWORDS)}"
-                 )
-
-     def generate_locations(self):
-         while len(self.location_pool) < 500 and self.running:
-             with self.lock:
-                 self.location_pool.append({
-                     "country": random.choice(COUNTRIES),
-                     "language": random.choice(LANGUAGES),
-                     "timezone": random.choice(TIMEZONES)
-                 })
-
-     def get_ipv4(self):
-         with self.lock:
-             if self.ipv4_pool:
-                 return self.ipv4_pool[random.randint(0, len(self.ipv4_pool) - 1)]
-             return (
-                 f"{random.randint(1, 223)}.{random.randint(0, 255)}."
-                 f"{random.randint(0, 255)}.{random.randint(1, 254)}"
-             )
-
-     def get_ipv6(self):
-         with self.lock:
-             if self.ipv6_pool:
-                 return self.ipv6_pool[random.randint(0, len(self.ipv6_pool) - 1)]
-             return ":".join([f"{random.randint(0, 65535):04x}" for _ in range(8)])
-
-     def get_user_agent(self):
-         with self.lock:
-             if self.user_agent_pool:
-                 return self.user_agent_pool[random.randint(0, len(self.user_agent_pool) - 1)]
-
-     def get_origin(self):
-         with self.lock:
-             if self.origin_pool:
-                 return self.origin_pool[random.randint(0, len(self.origin_pool) - 1)]
-
-     def get_referrer(self):
-         with self.lock:
-             if self.referrer_pool:
-                 return self.referrer_pool[random.randint(0, len(self.referrer_pool) - 1)]
-
-     def get_location(self):
-         with self.lock:
-             if self.location_pool:
-                 return self.location_pool[random.randint(0, len(self.location_pool) - 1)]
-
-     def start_engine(self):
-         for target in [
-             self.generate_ipv4,
-             self.generate_ipv6,
-             self.generate_user_agents,
-             self.generate_origins,
-             self.generate_referrers,
-             self.generate_locations
-         ]:
-             threading.Thread(target=target, daemon=True).start()
-
-     def stop(self):
-         self.running = False
-
- web_loader = WebLoader()
- web_loader.start_engine()

src/engine/browser_engine.py CHANGED
@@ -15,9 +15,9 @@ from config import (
      TCP_CONNECTOR_FORCE_CLOSE,
      TCP_CONNECTOR_ENABLE_CLEANUP,
      ENABLE_TRUST_ENV,
-     ENABLE_CONNECTOR_OWNER
+     ENABLE_CONNECTOR_OWNER,
+     USER_AGENT
  )
- from ..core.web_loader import web_loader
 
  class BrowserEngine:
      def __init__(self, configuration):
@@ -25,29 +25,7 @@ class BrowserEngine:
 
      def generate_headers(self):
          return {
-             "User-Agent": web_loader.get_user_agent(),
-             "X-Forwarded-For": f"{web_loader.get_ipv4()}, {web_loader.get_ipv6()}",
-             "X-Real-IP": web_loader.get_ipv4(),
-             "X-Originating-IP": web_loader.get_ipv4(),
-             "X-Remote-IP": web_loader.get_ipv4(),
-             "X-Remote-Addr": web_loader.get_ipv4(),
-             "X-Client-IP": web_loader.get_ipv4(),
-             "X-Forwarded-Host": web_loader.get_origin().replace("https://", "").replace("http://", ""),
-             "Origin": web_loader.get_origin(),
-             "Referer": web_loader.get_referrer(),
-             "Accept-Language": f"{web_loader.get_location()['language']},en;q=0.9",
-             "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
-             "Accept-Encoding": "gzip, deflate, br",
-             "DNT": "1",
-             "Connection": "keep-alive",
-             "Upgrade-Insecure-Requests": "1",
-             "Sec-Fetch-Dest": "document",
-             "Sec-Fetch-Mode": "navigate",
-             "Sec-Fetch-Site": "cross-site",
-             "Sec-Fetch-User": "?1",
-             "Cache-Control": "max-age=0",
-             "X-Country": web_loader.get_location()['country'],
-             "X-Timezone": web_loader.get_location()['timezone']
+             "User-Agent": USER_AGENT
          }
 
      def web_selector(self, search_query: str, search_provider: str = "google"):
 
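generate_headers now returns only the static User-Agent defined in config.py. The sketch below shows one plausible way the AIOHTTP-related flags imported here could be combined with that header into a client session; the open_session helper and the session construction are assumptions for illustration, not code from this commit.

import aiohttp
from config import (
    USER_AGENT,
    REQUEST_TIMEOUT,
    TCP_CONNECTOR_FORCE_CLOSE,
    TCP_CONNECTOR_ENABLE_CLEANUP,
    ENABLE_TRUST_ENV,
    ENABLE_CONNECTOR_OWNER
)

async def open_session():
    # Hypothetical helper: one deterministic header instead of the deleted
    # randomized X-Forwarded-For / Referer / locale set.
    connector = aiohttp.TCPConnector(
        force_close=TCP_CONNECTOR_FORCE_CLOSE,
        enable_cleanup_closed=TCP_CONNECTOR_ENABLE_CLEANUP
    )
    return aiohttp.ClientSession(
        connector=connector,
        connector_owner=ENABLE_CONNECTOR_OWNER,
        trust_env=ENABLE_TRUST_ENV,
        headers={"User-Agent": USER_AGENT},
        timeout=aiohttp.ClientTimeout(total=REQUEST_TIMEOUT)
    )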
src/processor/message_processor.py CHANGED
@@ -29,7 +29,6 @@ def searchgpt_playground(user_message, chat_history):
 
      conversation_messages = setup_response(chat_history, user_message)
      tool_response = ""
-     tools_done = False
 
      for tool_update in tools_setup(
          server=server,
@@ -44,7 +43,6 @@ def searchgpt_playground(user_message, chat_history):
          else:
              conversation_messages = tool_update[0]
              tool_response = tool_update[1]
-             tools_done = tool_update[2]
 
      if tool_response:
          yield tool_response + "\n\n"
@@ -52,9 +50,7 @@
      final_response_generator = generate_response(
          server=server,
          model_name=MODEL,
-         conversation_messages=conversation_messages,
-         tool_definitions=local_tools(),
-         tools_done=tools_done
+         conversation_messages=conversation_messages
      )
 
      for final_response in final_response_generator:
 
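With tools_done removed, searchgpt_playground simply streams the tool log (if any) followed by the model output. A usage sketch, assuming the generator signature shown in the hunk above; the query string is an arbitrary example:

for chunk in searchgpt_playground("latest aiohttp release notes", chat_history=[]):
    print(chunk, end="", flush=True)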
src/processor/response/generator.py CHANGED
@@ -12,9 +12,7 @@ from config import (
  def generate_response(
      server,
      model_name,
-     conversation_messages,
-     tool_definitions,
-     tools_done=False
+     conversation_messages
  ):
      response_generator = ""
 
@@ -22,8 +20,6 @@
      response = server.chat.completions.create(
          model=model_name,
          messages=conversation_messages,
-         tools=tool_definitions if not tools_done else None,
-         tool_choice="none",
          temperature=CHAT_TEMPERATURE,
          stream=STREAM
      )
 
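generate_response is now a plain streamed chat completion with no tools or tool_choice arguments. A self-contained sketch of how such a stream is typically consumed, assuming server is an OpenAI-compatible client; the temperature and stream values below are stand-ins for CHAT_TEMPERATURE and STREAM from config:

from openai import OpenAI

server = OpenAI()  # assumed OpenAI-compatible client
stream = server.chat.completions.create(
    model="gpt-4.1-nano",
    messages=[{"role": "user", "content": "hello"}],
    temperature=0.7,  # stand-in for CHAT_TEMPERATURE
    stream=True       # stand-in for STREAM
)
text = ""
for chunk in stream:
    delta = chunk.choices[0].delta.content
    if delta:  # role/finish chunks carry no content
        text += delta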
src/processor/tools/interaction.py CHANGED
@@ -62,6 +62,7 @@ def tools_setup(server, model_name, conversation_messages, tool_definitions, sea
          for i in range(0, len(tool_reasoning(tool_invocation.function.name, tools_parser(tool_invocation.function.arguments)[0], "parsing")) + 1):
              ITERATION_METRICS["logs_generator"] = styles(reasoning_interfaces(tool_reasoning(tool_invocation.function.name, tools_parser(tool_invocation.function.arguments)[0], "parsing"), i), expanded=True)
              yield ITERATION_METRICS["logs_generator"]
+         time.sleep(ITERATION_METRICS["tools_reasoning_parsing"])
 
          for i in range(0, len(tool_reasoning(tool_invocation.function.name, tools_parser(tool_invocation.function.arguments)[0], "executing")) + 1):
              ITERATION_METRICS["logs_generator"] = styles(reasoning_interfaces(tool_reasoning(tool_invocation.function.name, tools_parser(tool_invocation.function.arguments)[0], "executing"), i), expanded=True)
 
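The added pause reads the tools_reasoning_parsing entry introduced in config.py, so the parsing log stays visible for one second before the executing loop begins. This assumes time and ITERATION_METRICS are already imported in interaction.py; in isolation the new line reduces to:

import time
from config import ITERATION_METRICS  # assumed import path

time.sleep(ITERATION_METRICS["tools_reasoning_parsing"])  # currently 1 second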