Save logging
- har_and_cookies/.usage/2025-02-27.jsonl +99 -0
- logging/2025-02-27.jsonl +0 -0
- save.py +4 -1
- usage/2025-02-27.jsonl +99 -0
har_and_cookies/.usage/2025-02-27.jsonl
ADDED
@@ -0,0 +1,99 @@
{"user": "Soska5242", "model": "llama-3.2-11b", "provider": "HuggingFace", "prompt_tokens": 11, "completion_tokens": 0, "total_tokens": 11}
{"user": "Soska5242", "model": "llama-3.2-11b", "provider": "HuggingFace", "prompt_tokens": 47, "completion_tokens": 0, "total_tokens": 47}
{"user": "Soska5242", "model": "llama-3.2-11b", "provider": "HuggingFace", "prompt_tokens": 113, "completion_tokens": 0, "total_tokens": 113}
{"user": "Soska5242", "model": "llama-3.2-11b", "provider": "HuggingFace", "prompt_tokens": 13, "completion_tokens": 0, "total_tokens": 13}
{"user": "Soska5242", "model": "llama-3.2-11b", "provider": "HuggingFace", "prompt_tokens": 94, "completion_tokens": 0, "total_tokens": 94}
{"user": "Soska5242", "model": "llama-3.2-11b", "provider": "HuggingFace", "prompt_tokens": 13, "completion_tokens": 0, "total_tokens": 13}
{"user": "Soska5242", "model": "janus-pro-7b", "provider": "HuggingSpace", "prompt_tokens": 73, "completion_tokens": 0, "total_tokens": 73}
{"user": "zigblayck", "model": "flux", "provider": "G4F", "prompt_tokens": 13, "completion_tokens": 0, "total_tokens": 13}
{"user": "ming-520", "model": "gpt-4o", "provider": "PollinationsAI", "completion_tokens": 46, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 156, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 202}
{"user": "ming-520", "model": "gpt-4o", "provider": "PollinationsAI", "completion_tokens": 18, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 224, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 242}
{"user": "ming-520", "model": "gpt-4o", "provider": "PollinationsAI", "completion_tokens": 23, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 253, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 276}
{"user": "ming-520", "model": "gpt-4o", "provider": "PollinationsAI", "completion_tokens": 93, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 635, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 728}
{"user": "ming-520", "model": "gpt-4o", "provider": "PollinationsAI", "completion_tokens": 227, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 712, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 939}
{"user": "ming-520", "model": "gpt-4o", "provider": "PollinationsAI", "completion_tokens": 63, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 258, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 321}
{"model": "gpt-4o", "provider": "PollinationsAI", "completion_tokens": 464, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 9530, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 9994}
{"user": "roxky", "provider": "Gemini", "prompt_tokens": 22, "completion_tokens": 0, "total_tokens": 22}
{"user": "roxky", "model": "meta-llama/Llama-3.2-11B-Vision-Instruct", "provider": "HuggingChat", "prompt_tokens": 508, "completion_tokens": 0, "total_tokens": 508}
{"user": "roxky", "model": "meta-llama/Llama-3.2-11B-Vision-Instruct", "provider": "HuggingChat"}
{"user": "roxky", "model": "meta-llama/Llama-3.2-11B-Vision-Instruct", "provider": "HuggingChat"}
{"model": "DeepSeek-R1", "provider": "Blackbox", "prompt_tokens": 140, "completion_tokens": 17, "total_tokens": 157}
{"user": "roxky", "model": "voodoohop-flux-1-schnell", "provider": "HuggingSpace"}
{"user": "roxky", "model": "stabilityai-stable-diffusion-3-5-large", "provider": "HuggingSpace"}
{"user": "roxky", "model": "meta-llama/Llama-3.2-11B-Vision-Instruct", "provider": "HuggingFace"}
{"user": "roxky", "model": "meta-llama/Llama-3.2-11B-Vision-Instruct", "provider": "HuggingFace"}
{"user": "roxky", "provider": "Gemini"}
{"user": "JulfyCode", "model": "deepseek-r1", "provider": "DeepSeekAPI"}
{"user": "JulfyCode", "model": "gemini-2.0-flash-thinking", "provider": "Gemini"}
{"user": "roxky", "model": "qwen-qvq-72b-preview", "provider": "HuggingSpace"}
{"model": "evil", "provider": "PollinationsAI", "prompt_tokens": 442, "total_tokens": 457, "completion_tokens": 15, "prompt_tokens_details": null}
{"user": "roxky", "model": "openai", "provider": "PollinationsAI", "completion_tokens": 38, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 279, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 317}
{"user": "roxky", "model": "openai", "provider": "PollinationsAI", "completion_tokens": 88, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 941, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 1029}
{"user": "roxky", "model": "deepseek-v3", "provider": "DeepSeekAPI"}
{"user": "roxky", "model": "openai", "provider": "PollinationsAI", "completion_tokens": 41, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 825, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 866}
{"user": "roxky", "model": "openai", "provider": "PollinationsAI", "completion_tokens": 58, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 879, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 937}
{"user": "roxky", "model": "qwen-qvq-72b-preview", "provider": "HuggingSpace"}
{"user": "roxky", "model": "auto", "provider": "OpenaiChat"}
{"user": "DeLtA456", "model": "llama-3.2-11b", "provider": "HuggingFace", "prompt_tokens": 353, "completion_tokens": 0, "total_tokens": 353}
{"user": "DeLtA456", "model": "llama-3.2-11b", "provider": "HuggingFace", "prompt_tokens": 771, "completion_tokens": 0, "total_tokens": 771}
{"user": "DeLtA456", "model": "deepseek-v3", "provider": "DeepSeekAPI", "prompt_tokens": 266, "completion_tokens": 0, "total_tokens": 266}
{"user": "DeLtA456", "model": "deepseek-v3", "provider": "DeepSeekAPI", "prompt_tokens": 918, "completion_tokens": 0, "total_tokens": 918}
{"user": "DeLtA456", "model": "gpt-4", "provider": "OpenaiChat", "prompt_tokens": 1128, "completion_tokens": 0, "total_tokens": 1128}
{"user": "roxky", "model": "o3-mini", "provider": "OpenaiChat"}
{"model": "deepseek-r1", "provider": "Blackbox", "prompt_tokens": 29, "completion_tokens": 828, "total_tokens": 857}
{"model": "deepseek-r1", "provider": "Blackbox", "prompt_tokens": 1031, "completion_tokens": 806, "total_tokens": 1837}
{"model": "deepseek-r1", "provider": "Blackbox", "prompt_tokens": 29, "completion_tokens": 0, "total_tokens": 29}
{"model": "deepseek-r1", "provider": "Blackbox", "prompt_tokens": 543, "completion_tokens": 1002, "total_tokens": 1545}
{"model": "deepseek-r1", "provider": "Blackbox", "prompt_tokens": 543, "completion_tokens": 852, "total_tokens": 1395}
{"model": "deepseek-r1", "provider": "Glider", "prompt_tokens": 1618, "completion_tokens": 1421, "total_tokens": 3039}
{"user": "avanpost20", "model": "deepseek-v3", "provider": "DeepSeekAPI", "prompt_tokens": 10, "completion_tokens": 0, "total_tokens": 10}
{"user": "avanpost20", "model": "deepseek-v3", "provider": "DeepSeekAPI", "prompt_tokens": 60, "completion_tokens": 0, "total_tokens": 60}
{"user": "avanpost20", "model": "deepseek-r1", "provider": "HuggingFace", "prompt_tokens": 202, "completion_tokens": 264, "total_tokens": 466}
{"model": "deepseek-r1", "provider": "Glider", "prompt_tokens": 2689, "completion_tokens": 853, "total_tokens": 3542}
{"model": "deepseek-r1", "provider": "Glider", "prompt_tokens": 3795, "completion_tokens": 1465, "total_tokens": 5260}
{"user": "roxky", "model": "o3-mini", "provider": "OpenaiChat"}
{"model": "gpt-4o", "provider": "PollinationsAI", "completion_tokens": 715, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 2701, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 3416}
{"model": "gpt-4o", "provider": "PollinationsAI", "completion_tokens": 733, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 3451, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 4184}
{"user": "roxky", "model": "auto", "provider": "OpenaiChat"}
{"user": "jhordanjw123", "model": "o3-mini", "provider": "OpenaiChat", "prompt_tokens": 328, "completion_tokens": 0, "total_tokens": 328}
{"user": "jhordanjw123", "model": "claude", "provider": "PollinationsAI", "completion_tokens": 242, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 423, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 665}
{"user": "roxky", "model": "auto", "provider": "OpenaiChat"}
{"user": "Mickeii", "model": "llama-3.2-11b", "provider": "HuggingFace", "prompt_tokens": 8, "completion_tokens": 0, "total_tokens": 8}
{"user": "roxky", "model": "auto", "provider": "OpenaiChat"}
{"user": "roxky", "model": "auto", "provider": "OpenaiChat"}
{"user": "roxky", "model": "auto", "provider": "OpenaiChat"}
{"user": "roxky", "model": "auto", "provider": "OpenaiChat"}
{"user": "venderu", "model": "flux-pro", "provider": "PollinationsAI", "prompt_tokens": 35, "completion_tokens": 0, "total_tokens": 35}
{"model": "gpt-4o-mini", "provider": "DDG", "prompt_tokens": 10, "completion_tokens": 28, "total_tokens": 38}
{"user": "roxky", "model": "auto", "provider": "OpenaiChat"}
{"user": "roxky", "model": "auto", "provider": "OpenaiChat"}
{"user": "roxky", "model": "auto", "provider": "OpenaiChat"}
{"model": "gpt-4o-mini", "provider": "PollinationsAI", "completion_tokens": 30, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 142, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 172}
{"model": "gpt-4o-mini", "provider": "DDG", "prompt_tokens": 50, "completion_tokens": 10, "total_tokens": 60}
{"model": "gpt-4o-mini", "provider": "PollinationsAI", "completion_tokens": 15, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 188, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 203}
{"model": "gpt-4o-mini", "provider": "PollinationsAI", "completion_tokens": 10, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 213, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 223}
{"model": "gpt-4o-mini", "provider": "Liaobots", "prompt_tokens": 126, "completion_tokens": 87, "total_tokens": 213}
{"model": "gpt-4o-mini", "provider": "PollinationsAI", "completion_tokens": 10, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 330, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 340}
{"model": "openai", "provider": "PollinationsAI", "completion_tokens": 512, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 164, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 676}
{"model": "gpt-4o-mini", "provider": "DDG", "prompt_tokens": 12, "completion_tokens": 10, "total_tokens": 22}
{"user": "Emrik420", "model": "llama-3.2-11b", "provider": "HuggingFace", "prompt_tokens": 10, "completion_tokens": 0, "total_tokens": 10}
{"user": "Emrik420", "model": "llama-3.2-11b", "provider": "HuggingFace", "prompt_tokens": 35, "completion_tokens": 0, "total_tokens": 35}
{"user": "roxky", "model": "llama-3", "provider": "HuggingFace"}
{"user": "roxky", "model": "llama-3", "provider": "HuggingFace"}
{"user": "roxky", "model": "gemini-2.0", "provider": "PollinationsAI", "completion_tokens": 11, "prompt_tokens": 38, "total_tokens": 49}
{"user": "roxky", "model": "gemini-thinking", "provider": "PollinationsAI", "completion_tokens": 21, "prompt_tokens": 50, "total_tokens": 71}
{"user": "roxky", "model": "auto", "provider": "OpenaiChat"}
{"user": "WoxC", "model": "llama-3.2-11b", "provider": "HuggingFace", "prompt_tokens": 10, "completion_tokens": 0, "total_tokens": 10}
{"user": "Alex679", "model": "deepseek-r1", "provider": "HuggingFace", "prompt_tokens": 8, "completion_tokens": 1, "total_tokens": 9}
{"user": "Alex679", "model": "flux-dev", "provider": "PollinationsImage", "prompt_tokens": 9, "completion_tokens": 0, "total_tokens": 9}
{"user": "Alex679", "model": "flux-dev", "provider": "HuggingFace", "prompt_tokens": 8, "completion_tokens": 0, "total_tokens": 8}
{"user": "AstroChan", "model": "meta-llama/Llama-3.2-1B-Instruct", "provider": "HuggingFace", "prompt_tokens": 270, "completion_tokens": 0, "total_tokens": 270}
{"user": "AstroChan", "model": "meta-llama/Llama-3.2-1B-Instruct", "provider": "HuggingFace", "prompt_tokens": 112, "completion_tokens": 0, "total_tokens": 112}
{"user": "AstroChan", "model": "meta-llama/Llama-3.2-1B-Instruct", "provider": "HuggingFace", "prompt_tokens": 315, "completion_tokens": 0, "total_tokens": 315}
{"user": "AstroChan", "model": "meta-llama/Llama-3.2-1B-Instruct", "provider": "HuggingFace", "prompt_tokens": 511, "completion_tokens": 0, "total_tokens": 511}
{"user": "AstroChan", "model": "meta-llama/Llama-3.2-1B-Instruct", "provider": "HuggingFace", "prompt_tokens": 668, "completion_tokens": 0, "total_tokens": 668}
{"user": "AstroChan", "model": "meta-llama/Llama-3.2-1B-Instruct", "provider": "HuggingFace", "prompt_tokens": 823, "completion_tokens": 0, "total_tokens": 823}
{"user": "AstroChan", "model": "meta-llama/Llama-3.2-1B-Instruct", "provider": "HuggingFace", "prompt_tokens": 905, "completion_tokens": 0, "total_tokens": 905}
{"user": "AstroChan", "model": "meta-llama/Llama-3.2-1B-Instruct", "provider": "HuggingFace", "prompt_tokens": 1070, "completion_tokens": 0, "total_tokens": 1070}
{"user": "AstroChan", "model": "meta-llama/Llama-3.2-1B-Instruct", "provider": "HuggingFace", "prompt_tokens": 1308, "completion_tokens": 0, "total_tokens": 1308}
{"user": "thingthatis", "model": "flux-dev", "provider": "PollinationsImage", "prompt_tokens": 86, "completion_tokens": 0, "total_tokens": 86}
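Each record above is one JSON object per line (JSONL) with an optional "user" field plus "model", "provider" and token counts. As a rough illustration that is not part of this commit (the file path and the aggregate_usage helper are assumptions for the example), a day's usage could be totaled per provider and model with the standard library:

import json
from collections import Counter
from pathlib import Path

def aggregate_usage(path="har_and_cookies/.usage/2025-02-27.jsonl"):
    # Sum total_tokens per (provider, model); records without counts contribute 0.
    totals = Counter()
    for raw in Path(path).read_text().splitlines():
        if not raw.strip():
            continue
        record = json.loads(raw)
        key = (record.get("provider", "unknown"), record.get("model", "unknown"))
        totals[key] += record.get("total_tokens", 0)
    return totals

for (provider, model), tokens in aggregate_usage().most_common():
    print(provider, model, tokens)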
logging/2025-02-27.jsonl
ADDED
The diff for this file is too large to render.
See raw diff
save.py
CHANGED
@@ -1,5 +1,6 @@
 import os
 import json
+import re
 from g4f.cookies import get_cookies_dir

 def get_logs(log_dir):
@@ -20,4 +21,6 @@ for part in (".logging", ".usage"):
 line.pop("origin")
 if "user" in line:
 line.pop("user")
-
+data = json.dumps(line)
+data = re.sub(r"Users\\\\\\.+?\\{2,}", "", data)
+save.write(data + "\n")
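The added lines are the substance of this commit: each cleaned record is serialized, escaped Windows user-profile path fragments are stripped from the serialized text, and the result is written out. A minimal, self-contained sketch of that step, assuming `line` is one parsed log record and `save` is an open output file (the surrounding get_logs loop is not reproduced here):

import json
import re

def write_sanitized(line: dict, save) -> None:
    # Drop fields the existing loop already removes before publishing.
    line.pop("origin", None)
    line.pop("user", None)
    data = json.dumps(line)
    # Same pattern as the line added in save.py: strip escaped
    # "Users\..." path fragments from the serialized JSON.
    data = re.sub(r"Users\\\\\\.+?\\{2,}", "", data)
    save.write(data + "\n")

Dropping "user" here matches the usage/2025-02-27.jsonl copy below, whose records carry no "user" field.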
usage/2025-02-27.jsonl
ADDED
@@ -0,0 +1,99 @@
{"model": "llama-3.2-11b", "provider": "HuggingFace", "prompt_tokens": 11, "completion_tokens": 0, "total_tokens": 11}
{"model": "llama-3.2-11b", "provider": "HuggingFace", "prompt_tokens": 47, "completion_tokens": 0, "total_tokens": 47}
{"model": "llama-3.2-11b", "provider": "HuggingFace", "prompt_tokens": 113, "completion_tokens": 0, "total_tokens": 113}
{"model": "llama-3.2-11b", "provider": "HuggingFace", "prompt_tokens": 13, "completion_tokens": 0, "total_tokens": 13}
{"model": "llama-3.2-11b", "provider": "HuggingFace", "prompt_tokens": 94, "completion_tokens": 0, "total_tokens": 94}
{"model": "llama-3.2-11b", "provider": "HuggingFace", "prompt_tokens": 13, "completion_tokens": 0, "total_tokens": 13}
{"model": "janus-pro-7b", "provider": "HuggingSpace", "prompt_tokens": 73, "completion_tokens": 0, "total_tokens": 73}
{"model": "flux", "provider": "G4F", "prompt_tokens": 13, "completion_tokens": 0, "total_tokens": 13}
{"model": "gpt-4o", "provider": "PollinationsAI", "completion_tokens": 46, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 156, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 202}
{"model": "gpt-4o", "provider": "PollinationsAI", "completion_tokens": 18, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 224, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 242}
{"model": "gpt-4o", "provider": "PollinationsAI", "completion_tokens": 23, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 253, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 276}
{"model": "gpt-4o", "provider": "PollinationsAI", "completion_tokens": 93, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 635, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 728}
{"model": "gpt-4o", "provider": "PollinationsAI", "completion_tokens": 227, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 712, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 939}
{"model": "gpt-4o", "provider": "PollinationsAI", "completion_tokens": 63, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 258, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 321}
{"model": "gpt-4o", "provider": "PollinationsAI", "completion_tokens": 464, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 9530, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 9994}
{"provider": "Gemini", "prompt_tokens": 22, "completion_tokens": 0, "total_tokens": 22}
{"model": "meta-llama/Llama-3.2-11B-Vision-Instruct", "provider": "HuggingChat", "prompt_tokens": 508, "completion_tokens": 0, "total_tokens": 508}
{"model": "meta-llama/Llama-3.2-11B-Vision-Instruct", "provider": "HuggingChat"}
{"model": "meta-llama/Llama-3.2-11B-Vision-Instruct", "provider": "HuggingChat"}
{"model": "DeepSeek-R1", "provider": "Blackbox", "prompt_tokens": 140, "completion_tokens": 17, "total_tokens": 157}
{"model": "voodoohop-flux-1-schnell", "provider": "HuggingSpace"}
{"model": "stabilityai-stable-diffusion-3-5-large", "provider": "HuggingSpace"}
{"model": "meta-llama/Llama-3.2-11B-Vision-Instruct", "provider": "HuggingFace"}
{"model": "meta-llama/Llama-3.2-11B-Vision-Instruct", "provider": "HuggingFace"}
{"provider": "Gemini"}
{"model": "deepseek-r1", "provider": "DeepSeekAPI"}
{"model": "gemini-2.0-flash-thinking", "provider": "Gemini"}
{"model": "qwen-qvq-72b-preview", "provider": "HuggingSpace"}
{"model": "evil", "provider": "PollinationsAI", "prompt_tokens": 442, "total_tokens": 457, "completion_tokens": 15, "prompt_tokens_details": null}
{"model": "openai", "provider": "PollinationsAI", "completion_tokens": 38, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 279, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 317}
{"model": "openai", "provider": "PollinationsAI", "completion_tokens": 88, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 941, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 1029}
{"model": "deepseek-v3", "provider": "DeepSeekAPI"}
{"model": "openai", "provider": "PollinationsAI", "completion_tokens": 41, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 825, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 866}
{"model": "openai", "provider": "PollinationsAI", "completion_tokens": 58, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 879, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 937}
{"model": "qwen-qvq-72b-preview", "provider": "HuggingSpace"}
{"model": "auto", "provider": "OpenaiChat"}
{"model": "llama-3.2-11b", "provider": "HuggingFace", "prompt_tokens": 353, "completion_tokens": 0, "total_tokens": 353}
{"model": "llama-3.2-11b", "provider": "HuggingFace", "prompt_tokens": 771, "completion_tokens": 0, "total_tokens": 771}
{"model": "deepseek-v3", "provider": "DeepSeekAPI", "prompt_tokens": 266, "completion_tokens": 0, "total_tokens": 266}
{"model": "deepseek-v3", "provider": "DeepSeekAPI", "prompt_tokens": 918, "completion_tokens": 0, "total_tokens": 918}
{"model": "gpt-4", "provider": "OpenaiChat", "prompt_tokens": 1128, "completion_tokens": 0, "total_tokens": 1128}
{"model": "o3-mini", "provider": "OpenaiChat"}
{"model": "deepseek-r1", "provider": "Blackbox", "prompt_tokens": 29, "completion_tokens": 828, "total_tokens": 857}
{"model": "deepseek-r1", "provider": "Blackbox", "prompt_tokens": 1031, "completion_tokens": 806, "total_tokens": 1837}
{"model": "deepseek-r1", "provider": "Blackbox", "prompt_tokens": 29, "completion_tokens": 0, "total_tokens": 29}
{"model": "deepseek-r1", "provider": "Blackbox", "prompt_tokens": 543, "completion_tokens": 1002, "total_tokens": 1545}
{"model": "deepseek-r1", "provider": "Blackbox", "prompt_tokens": 543, "completion_tokens": 852, "total_tokens": 1395}
{"model": "deepseek-r1", "provider": "Glider", "prompt_tokens": 1618, "completion_tokens": 1421, "total_tokens": 3039}
{"model": "deepseek-v3", "provider": "DeepSeekAPI", "prompt_tokens": 10, "completion_tokens": 0, "total_tokens": 10}
{"model": "deepseek-v3", "provider": "DeepSeekAPI", "prompt_tokens": 60, "completion_tokens": 0, "total_tokens": 60}
{"model": "deepseek-r1", "provider": "HuggingFace", "prompt_tokens": 202, "completion_tokens": 264, "total_tokens": 466}
{"model": "deepseek-r1", "provider": "Glider", "prompt_tokens": 2689, "completion_tokens": 853, "total_tokens": 3542}
{"model": "deepseek-r1", "provider": "Glider", "prompt_tokens": 3795, "completion_tokens": 1465, "total_tokens": 5260}
{"model": "o3-mini", "provider": "OpenaiChat"}
{"model": "gpt-4o", "provider": "PollinationsAI", "completion_tokens": 715, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 2701, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 3416}
{"model": "gpt-4o", "provider": "PollinationsAI", "completion_tokens": 733, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 3451, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 4184}
{"model": "auto", "provider": "OpenaiChat"}
{"model": "o3-mini", "provider": "OpenaiChat", "prompt_tokens": 328, "completion_tokens": 0, "total_tokens": 328}
{"model": "claude", "provider": "PollinationsAI", "completion_tokens": 242, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 423, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 665}
{"model": "auto", "provider": "OpenaiChat"}
{"model": "llama-3.2-11b", "provider": "HuggingFace", "prompt_tokens": 8, "completion_tokens": 0, "total_tokens": 8}
{"model": "auto", "provider": "OpenaiChat"}
{"model": "auto", "provider": "OpenaiChat"}
{"model": "auto", "provider": "OpenaiChat"}
{"model": "auto", "provider": "OpenaiChat"}
{"model": "flux-pro", "provider": "PollinationsAI", "prompt_tokens": 35, "completion_tokens": 0, "total_tokens": 35}
{"model": "gpt-4o-mini", "provider": "DDG", "prompt_tokens": 10, "completion_tokens": 28, "total_tokens": 38}
{"model": "auto", "provider": "OpenaiChat"}
{"model": "auto", "provider": "OpenaiChat"}
{"model": "auto", "provider": "OpenaiChat"}
{"model": "gpt-4o-mini", "provider": "PollinationsAI", "completion_tokens": 30, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 142, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 172}
{"model": "gpt-4o-mini", "provider": "DDG", "prompt_tokens": 50, "completion_tokens": 10, "total_tokens": 60}
{"model": "gpt-4o-mini", "provider": "PollinationsAI", "completion_tokens": 15, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 188, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 203}
{"model": "gpt-4o-mini", "provider": "PollinationsAI", "completion_tokens": 10, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 213, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 223}
{"model": "gpt-4o-mini", "provider": "Liaobots", "prompt_tokens": 126, "completion_tokens": 87, "total_tokens": 213}
{"model": "gpt-4o-mini", "provider": "PollinationsAI", "completion_tokens": 10, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 330, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 340}
{"model": "openai", "provider": "PollinationsAI", "completion_tokens": 512, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 164, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 676}
{"model": "gpt-4o-mini", "provider": "DDG", "prompt_tokens": 12, "completion_tokens": 10, "total_tokens": 22}
{"model": "llama-3.2-11b", "provider": "HuggingFace", "prompt_tokens": 10, "completion_tokens": 0, "total_tokens": 10}
{"model": "llama-3.2-11b", "provider": "HuggingFace", "prompt_tokens": 35, "completion_tokens": 0, "total_tokens": 35}
{"model": "llama-3", "provider": "HuggingFace"}
{"model": "llama-3", "provider": "HuggingFace"}
{"model": "gemini-2.0", "provider": "PollinationsAI", "completion_tokens": 11, "prompt_tokens": 38, "total_tokens": 49}
{"model": "gemini-thinking", "provider": "PollinationsAI", "completion_tokens": 21, "prompt_tokens": 50, "total_tokens": 71}
{"model": "auto", "provider": "OpenaiChat"}
{"model": "llama-3.2-11b", "provider": "HuggingFace", "prompt_tokens": 10, "completion_tokens": 0, "total_tokens": 10}
{"model": "deepseek-r1", "provider": "HuggingFace", "prompt_tokens": 8, "completion_tokens": 1, "total_tokens": 9}
{"model": "flux-dev", "provider": "PollinationsImage", "prompt_tokens": 9, "completion_tokens": 0, "total_tokens": 9}
{"model": "flux-dev", "provider": "HuggingFace", "prompt_tokens": 8, "completion_tokens": 0, "total_tokens": 8}
{"model": "meta-llama/Llama-3.2-1B-Instruct", "provider": "HuggingFace", "prompt_tokens": 270, "completion_tokens": 0, "total_tokens": 270}
{"model": "meta-llama/Llama-3.2-1B-Instruct", "provider": "HuggingFace", "prompt_tokens": 112, "completion_tokens": 0, "total_tokens": 112}
{"model": "meta-llama/Llama-3.2-1B-Instruct", "provider": "HuggingFace", "prompt_tokens": 315, "completion_tokens": 0, "total_tokens": 315}
{"model": "meta-llama/Llama-3.2-1B-Instruct", "provider": "HuggingFace", "prompt_tokens": 511, "completion_tokens": 0, "total_tokens": 511}
{"model": "meta-llama/Llama-3.2-1B-Instruct", "provider": "HuggingFace", "prompt_tokens": 668, "completion_tokens": 0, "total_tokens": 668}
{"model": "meta-llama/Llama-3.2-1B-Instruct", "provider": "HuggingFace", "prompt_tokens": 823, "completion_tokens": 0, "total_tokens": 823}
{"model": "meta-llama/Llama-3.2-1B-Instruct", "provider": "HuggingFace", "prompt_tokens": 905, "completion_tokens": 0, "total_tokens": 905}
{"model": "meta-llama/Llama-3.2-1B-Instruct", "provider": "HuggingFace", "prompt_tokens": 1070, "completion_tokens": 0, "total_tokens": 1070}
{"model": "meta-llama/Llama-3.2-1B-Instruct", "provider": "HuggingFace", "prompt_tokens": 1308, "completion_tokens": 0, "total_tokens": 1308}
{"model": "flux-dev", "provider": "PollinationsImage", "prompt_tokens": 86, "completion_tokens": 0, "total_tokens": 86}
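As a hypothetical spot check (not part of the commit; the check_published helper and the path are assumptions), the published copy above can be scanned to confirm it carries neither usernames nor escaped local path fragments:

import json
from pathlib import Path

def check_published(path="usage/2025-02-27.jsonl") -> bool:
    # True only if no record has a "user" key and no escaped
    # Windows "Users\\" path fragment survived sanitization.
    for raw in Path(path).read_text().splitlines():
        if not raw.strip():
            continue
        if "user" in json.loads(raw) or "Users\\\\" in raw:
            return False
    return True

print(check_published())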