Spaces:
Paused
Paused
Initial upload of PWDLV3 content
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +2 -35
- .github/workflows/Setup.yml +60 -0
- .github/workflows/main.yml +34 -0
- .gitignore +204 -0
- Dockerfile +40 -0
- README.md +57 -7
- beta/Syncer/db_utils/DataObject.py +101 -0
- beta/Syncer/db_utils/Database.py +77 -0
- beta/Syncer/db_utils/Errors.py +21 -0
- beta/Syncer/db_utils/Schema.py +100 -0
- beta/Syncer/main.py +112 -0
- beta/api/api.py +55 -0
- beta/api/api_dl.py +52 -0
- beta/api/blueprints/admin_routes.py +157 -0
- beta/api/blueprints/api_pref_manager.py +46 -0
- beta/api/blueprints/client_info_routes.py +149 -0
- beta/api/blueprints/leagacy_create_task.py +40 -0
- beta/api/blueprints/login.py +51 -0
- beta/api/blueprints/scarper.py +349 -0
- beta/api/blueprints/session_lodge.py +137 -0
- beta/api/blueprints/template_routes.py +45 -0
- beta/api/blueprints/while_dl_and_post_dl.py +42 -0
- beta/api/mr_manager/boss_manager.py +9 -0
- beta/api/mr_manager/client_manager.py +186 -0
- beta/api/mr_manager/task_manager.py +124 -0
- beta/api/templates/base.html +269 -0
- beta/api/templates/error.html +80 -0
- beta/api/templates/index.html +23 -0
- beta/batch_scraper_2/Endpoints.py +150 -0
- beta/obsolete/batch_scraper/Endpoints.py +178 -0
- beta/obsolete/batch_scraper/app.py +138 -0
- beta/question_scraper/Endpoints.py +72 -0
- beta/question_scraper/app.py +86 -0
- beta/shellLogic/Plugin.py +80 -0
- beta/shellLogic/TokenUpdate.py +34 -0
- beta/shellLogic/handleLogics/HandleBasicCMDUtils.py +51 -0
- beta/shellLogic/handleLogics/HandleBatch.py +177 -0
- beta/shellLogic/handleLogics/HandleHell.py +12 -0
- beta/shellLogic/handleLogics/HandleKeyAndAvailiblity.py +42 -0
- beta/shellLogic/handleLogics/HandleQuestions.py +117 -0
- beta/shellLogic/handleLogics/HandleShellDL.py +74 -0
- beta/shellLogic/handleLogics/HandleWEB.py +14 -0
- beta/shellLogic/logic.py +25 -0
- beta/shellLogic/logicError.py +6 -0
- beta/shellLogic/shell.py +76 -0
- beta/shellLogic/shell_var.py +57 -0
- beta/shellLogic/simpleParser.py +6 -0
- beta/update.py +38 -0
- beta/util.py +30 -0
- defaults.json +89 -0
.gitattributes
CHANGED
@@ -1,35 +1,2 @@
|
|
1 |
-
|
2 |
-
|
3 |
-
*.bin filter=lfs diff=lfs merge=lfs -text
|
4 |
-
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
5 |
-
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
6 |
-
*.ftz filter=lfs diff=lfs merge=lfs -text
|
7 |
-
*.gz filter=lfs diff=lfs merge=lfs -text
|
8 |
-
*.h5 filter=lfs diff=lfs merge=lfs -text
|
9 |
-
*.joblib filter=lfs diff=lfs merge=lfs -text
|
10 |
-
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
11 |
-
*.mlmodel filter=lfs diff=lfs merge=lfs -text
|
12 |
-
*.model filter=lfs diff=lfs merge=lfs -text
|
13 |
-
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
14 |
-
*.npy filter=lfs diff=lfs merge=lfs -text
|
15 |
-
*.npz filter=lfs diff=lfs merge=lfs -text
|
16 |
-
*.onnx filter=lfs diff=lfs merge=lfs -text
|
17 |
-
*.ot filter=lfs diff=lfs merge=lfs -text
|
18 |
-
*.parquet filter=lfs diff=lfs merge=lfs -text
|
19 |
-
*.pb filter=lfs diff=lfs merge=lfs -text
|
20 |
-
*.pickle filter=lfs diff=lfs merge=lfs -text
|
21 |
-
*.pkl filter=lfs diff=lfs merge=lfs -text
|
22 |
-
*.pt filter=lfs diff=lfs merge=lfs -text
|
23 |
-
*.pth filter=lfs diff=lfs merge=lfs -text
|
24 |
-
*.rar filter=lfs diff=lfs merge=lfs -text
|
25 |
-
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
26 |
-
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
27 |
-
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
28 |
-
*.tar filter=lfs diff=lfs merge=lfs -text
|
29 |
-
*.tflite filter=lfs diff=lfs merge=lfs -text
|
30 |
-
*.tgz filter=lfs diff=lfs merge=lfs -text
|
31 |
-
*.wasm filter=lfs diff=lfs merge=lfs -text
|
32 |
-
*.xz filter=lfs diff=lfs merge=lfs -text
|
33 |
-
*.zip filter=lfs diff=lfs merge=lfs -text
|
34 |
-
*.zst filter=lfs diff=lfs merge=lfs -text
|
35 |
-
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
1 |
+
# Auto detect text files and perform LF normalization
|
2 |
+
* text=auto
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
.github/workflows/Setup.yml
ADDED
@@ -0,0 +1,60 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
name: Verify PWDLv3 Setup
|
2 |
+
on:
|
3 |
+
workflow_dispatch:
|
4 |
+
|
5 |
+
jobs:
|
6 |
+
verify_setup:
|
7 |
+
runs-on: windows-latest
|
8 |
+
steps:
|
9 |
+
- name: Download Setup Script
|
10 |
+
run: |
|
11 |
+
curl -o setup_pwdl.bat -L https://gist.githubusercontent.com/shubhamakshit/8654b80e9f64c2791d7f8e681cb1b47d/raw
|
12 |
+
dir setup_pwdl.bat
|
13 |
+
|
14 |
+
- name: Verify Script Content
|
15 |
+
shell: powershell
|
16 |
+
run: |
|
17 |
+
Write-Host "Checking script content..."
|
18 |
+
Get-Content setup_pwdl.bat
|
19 |
+
|
20 |
+
- name: Run Setup Script
|
21 |
+
shell: cmd
|
22 |
+
run: |
|
23 |
+
echo "Starting setup..."
|
24 |
+
setup_pwdl.bat
|
25 |
+
echo "Setup completed"
|
26 |
+
|
27 |
+
- name: Verify Installation
|
28 |
+
shell: powershell
|
29 |
+
run: |
|
30 |
+
Write-Host "Checking environment..."
|
31 |
+
$env:Path = [System.Environment]::GetEnvironmentVariable("Path","Machine") + ";" + [System.Environment]::GetEnvironmentVariable("Path","User")
|
32 |
+
|
33 |
+
Write-Host "Checking PWDL installation..."
|
34 |
+
Get-Command pwdl -ErrorAction SilentlyContinue
|
35 |
+
if ($?) {
|
36 |
+
Write-Host "PWDL command found"
|
37 |
+
pwdl --version
|
38 |
+
pwdl --help
|
39 |
+
} else {
|
40 |
+
Write-Error "PWDL command not found"
|
41 |
+
exit 1
|
42 |
+
}
|
43 |
+
|
44 |
+
Write-Host "Checking if service is running..."
|
45 |
+
pwdl --verbose
|
46 |
+
|
47 |
+
- name: Check Dependencies
|
48 |
+
shell: powershell
|
49 |
+
run: |
|
50 |
+
Write-Host "Verifying dependencies..."
|
51 |
+
$deps = @("python", "git", "ffmpeg")
|
52 |
+
foreach ($dep in $deps) {
|
53 |
+
if (Get-Command $dep -ErrorAction SilentlyContinue) {
|
54 |
+
Write-Host "$dep is installed and accessible"
|
55 |
+
& $dep --version
|
56 |
+
} else {
|
57 |
+
Write-Error "$dep is not installed or not in PATH"
|
58 |
+
exit 1
|
59 |
+
}
|
60 |
+
}
|
.github/workflows/main.yml
ADDED
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
name: Build and Push Docker Image
|
2 |
+
|
3 |
+
on:
|
4 |
+
push:
|
5 |
+
branches:
|
6 |
+
- main
|
7 |
+
workflow_dispatch:
|
8 |
+
|
9 |
+
jobs:
|
10 |
+
build:
|
11 |
+
runs-on: ubuntu-latest
|
12 |
+
|
13 |
+
steps:
|
14 |
+
- name: Checkout repository
|
15 |
+
uses: actions/checkout@v2
|
16 |
+
|
17 |
+
- name: Set up Docker Buildx
|
18 |
+
uses: docker/setup-buildx-action@v1
|
19 |
+
|
20 |
+
- name: Login to Docker Hub
|
21 |
+
uses: docker/login-action@v2
|
22 |
+
with:
|
23 |
+
username: ${{ secrets.DOCKER_HUB_USERNAME }}
|
24 |
+
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
|
25 |
+
|
26 |
+
- name: Build and push Docker image
|
27 |
+
uses: docker/build-push-action@v2
|
28 |
+
with:
|
29 |
+
context: .
|
30 |
+
push: true
|
31 |
+
tags: shubhamakshit/pwdl:latest
|
32 |
+
|
33 |
+
- name: Logout Docker Hub
|
34 |
+
run: docker logout
|
.gitignore
ADDED
@@ -0,0 +1,204 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Created by https://www.toptal.com/developers/gitignore/api/python
|
2 |
+
# Edit at https://www.toptal.com/developers/gitignore?templates=python
|
3 |
+
|
4 |
+
### Python ###
|
5 |
+
# Byte-compiled / optimized / DLL files
|
6 |
+
__pycache__/
|
7 |
+
*.py[cod]
|
8 |
+
*$py.class
|
9 |
+
|
10 |
+
# IDE
|
11 |
+
.idea/
|
12 |
+
|
13 |
+
# Download Files
|
14 |
+
webdl/
|
15 |
+
|
16 |
+
# CSV Files
|
17 |
+
*.csv
|
18 |
+
|
19 |
+
# C extensions
|
20 |
+
*.so
|
21 |
+
|
22 |
+
# Distribution / packaging
|
23 |
+
.Python
|
24 |
+
build/
|
25 |
+
develop-eggs/
|
26 |
+
dist/
|
27 |
+
downloads/
|
28 |
+
eggs/
|
29 |
+
.eggs/
|
30 |
+
lib/
|
31 |
+
lib64/
|
32 |
+
parts/
|
33 |
+
sdist/
|
34 |
+
var/
|
35 |
+
wheels/
|
36 |
+
share/python-wheels/
|
37 |
+
*.egg-info/
|
38 |
+
.installed.cfg
|
39 |
+
*.egg
|
40 |
+
MANIFEST
|
41 |
+
|
42 |
+
# PyInstaller
|
43 |
+
# Usually these files are written by a python script from a template
|
44 |
+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
45 |
+
*.manifest
|
46 |
+
*.spec
|
47 |
+
|
48 |
+
# Installer logs
|
49 |
+
pip-log.txt
|
50 |
+
pip-delete-this-directory.txt
|
51 |
+
|
52 |
+
# Unit test / coverage reports
|
53 |
+
htmlcov/
|
54 |
+
.tox/
|
55 |
+
.nox/
|
56 |
+
.coverage
|
57 |
+
.coverage.*
|
58 |
+
.cache
|
59 |
+
nosetests.xml
|
60 |
+
coverage.xml
|
61 |
+
*.cover
|
62 |
+
*.py,cover
|
63 |
+
.hypothesis/
|
64 |
+
.pytest_cache/
|
65 |
+
cover/
|
66 |
+
|
67 |
+
# Translations
|
68 |
+
*.mo
|
69 |
+
*.pot
|
70 |
+
|
71 |
+
# Django stuff:
|
72 |
+
*.log
|
73 |
+
local_settings.py
|
74 |
+
db.sqlite3
|
75 |
+
db.sqlite3-journal
|
76 |
+
|
77 |
+
# Flask stuff:
|
78 |
+
instance/
|
79 |
+
.webassets-cache
|
80 |
+
|
81 |
+
# Scrapy stuff:
|
82 |
+
.scrapy
|
83 |
+
|
84 |
+
# Sphinx documentation
|
85 |
+
docs/_build/
|
86 |
+
|
87 |
+
# PyBuilder
|
88 |
+
.pybuilder/
|
89 |
+
target/
|
90 |
+
|
91 |
+
# Jupyter Notebook
|
92 |
+
.ipynb_checkpoints
|
93 |
+
|
94 |
+
# IPython
|
95 |
+
profile_default/
|
96 |
+
ipython_config.py
|
97 |
+
|
98 |
+
# pyenv
|
99 |
+
# For a library or package, you might want to ignore these files since the code is
|
100 |
+
# intended to run in multiple environments; otherwise, check them in:
|
101 |
+
# .python-version
|
102 |
+
|
103 |
+
# pipenv
|
104 |
+
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
105 |
+
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
106 |
+
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
107 |
+
# install all needed dependencies.
|
108 |
+
#Pipfile.lock
|
109 |
+
|
110 |
+
# poetry
|
111 |
+
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
112 |
+
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
113 |
+
# commonly ignored for libraries.
|
114 |
+
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
115 |
+
#poetry.lock
|
116 |
+
|
117 |
+
# pdm
|
118 |
+
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
119 |
+
#pdm.lock
|
120 |
+
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
121 |
+
# in version control.
|
122 |
+
# https://pdm.fming.dev/#use-with-ide
|
123 |
+
.pdm.toml
|
124 |
+
|
125 |
+
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
126 |
+
__pypackages__/
|
127 |
+
|
128 |
+
# Celery stuff
|
129 |
+
celerybeat-schedule
|
130 |
+
celerybeat.pid
|
131 |
+
|
132 |
+
# SageMath parsed files
|
133 |
+
*.sage.py
|
134 |
+
|
135 |
+
# Environments
|
136 |
+
.env
|
137 |
+
.venv
|
138 |
+
env/
|
139 |
+
venv/
|
140 |
+
ENV/
|
141 |
+
env.bak/
|
142 |
+
venv.bak/
|
143 |
+
|
144 |
+
# Spyder project settings
|
145 |
+
.spyderproject
|
146 |
+
.spyproject
|
147 |
+
|
148 |
+
# Rope project settings
|
149 |
+
.ropeproject
|
150 |
+
|
151 |
+
# mkdocs documentation
|
152 |
+
/site
|
153 |
+
|
154 |
+
# mypy
|
155 |
+
.mypy_cache/
|
156 |
+
.dmypy.json
|
157 |
+
dmypy.json
|
158 |
+
|
159 |
+
# Pyre type checker
|
160 |
+
.pyre/
|
161 |
+
|
162 |
+
# pytype static type analyzer
|
163 |
+
.pytype/
|
164 |
+
|
165 |
+
# Cython debug symbols
|
166 |
+
cython_debug/
|
167 |
+
|
168 |
+
# PyCharm
|
169 |
+
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
170 |
+
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
171 |
+
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
172 |
+
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
173 |
+
#.idea/
|
174 |
+
|
175 |
+
### Python Patch ###
|
176 |
+
# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
|
177 |
+
poetry.toml
|
178 |
+
|
179 |
+
# ruff
|
180 |
+
.ruff_cache/
|
181 |
+
|
182 |
+
# LSP config files
|
183 |
+
pyrightconfig.json
|
184 |
+
|
185 |
+
# End of https://www.toptal.com/developers/gitignore/api/python
|
186 |
+
pwdlv3.lnk
|
187 |
+
*.mp4
|
188 |
+
/bin/Logs/
|
189 |
+
*.m4s
|
190 |
+
/tmp
|
191 |
+
/bin
|
192 |
+
*.un~
|
193 |
+
*.py~
|
194 |
+
|
195 |
+
# ignore all *.test.py files
|
196 |
+
*.test.py
|
197 |
+
#*.json
|
198 |
+
clients.json
|
199 |
+
clients.json
|
200 |
+
/csv_files/
|
201 |
+
/populate.py
|
202 |
+
/populate.py
|
203 |
+
/populate.py
|
204 |
+
pop.bat
|
Dockerfile
ADDED
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Use an official Python runtime as a parent image
|
2 |
+
FROM python:3.12-slim
|
3 |
+
|
4 |
+
# Set the working directory in the container to /app
|
5 |
+
WORKDIR /app
|
6 |
+
|
7 |
+
# Install system dependencies
|
8 |
+
RUN apt-get update && apt-get install -y --no-install-recommends \
|
9 |
+
ffmpeg \
|
10 |
+
curl \
|
11 |
+
&& rm -rf /var/lib/apt/lists/*
|
12 |
+
|
13 |
+
# Create bin directory and download mp4decrypt
|
14 |
+
RUN mkdir -p /app/bin && \
|
15 |
+
curl -o /app/bin/mp4decrypt https://raw.githubusercontent.com/shubhamakshit/pwdlv3_assets/main/GNU/Linux/x86_64/mp4decrypt && \
|
16 |
+
chmod +x /app/bin/mp4decrypt
|
17 |
+
|
18 |
+
# Add /app/bin to PATH
|
19 |
+
ENV PATH="/app/bin:$PATH"
|
20 |
+
|
21 |
+
# Copy requirements first for better caching
|
22 |
+
COPY requirements.txt .
|
23 |
+
|
24 |
+
# Install Python dependencies including gunicorn
|
25 |
+
RUN pip install --no-cache-dir -r requirements.txt && \
|
26 |
+
pip install --no-cache-dir gunicorn
|
27 |
+
|
28 |
+
# Copy the rest of the application
|
29 |
+
COPY . .
|
30 |
+
|
31 |
+
# Make sure /app is writable (after copying files)
|
32 |
+
RUN chmod -R 777 /app && \
|
33 |
+
mkdir -p /app/webdl && \
|
34 |
+
chmod 777 /app/webdl
|
35 |
+
|
36 |
+
# Expose port 7860 for HuggingFace Spaces
|
37 |
+
EXPOSE 7860
|
38 |
+
|
39 |
+
# Run the application with gunicorn
|
40 |
+
CMD ["gunicorn", "--bind", "0.0.0.0:7860","--timeout", "120", "beta.api.api:app"]
|
README.md
CHANGED
@@ -1,12 +1,62 @@
|
|
1 |
---
|
2 |
-
title:
|
3 |
-
emoji:
|
4 |
-
colorFrom:
|
5 |
-
colorTo:
|
6 |
sdk: gradio
|
7 |
-
sdk_version:
|
8 |
-
app_file:
|
9 |
pinned: false
|
10 |
---
|
11 |
|
12 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
---
|
2 |
+
title: pwdlv3
|
3 |
+
emoji: 📦
|
4 |
+
colorFrom: red
|
5 |
+
colorTo: blue
|
6 |
sdk: gradio
|
7 |
+
sdk_version: "20.10"
|
8 |
+
app_file: Dockerfile
|
9 |
pinned: false
|
10 |
---
|
11 |
|
12 |
+
|
13 |
+
# PWDLv3 - pw.live Downloader Version 3
|
14 |
+
|
15 |
+
## History
|
16 |
+
This project started way back in 2023 as [pwdl](https://github.com/shubhamakshit/pwdl). It was then merely a script for downloading segments using a loophole in pw's api. When it got pached we moved on to [pwdlv2](https://github.com/shubhamakshit/pwdlv2) in March, 2024. It was mostly haphazard code. pwdlv3 started in April, 2024 as is proudly growing ever since.
|
17 |
+
|
18 |
+
## Installing guide
|
19 |
+
|
20 |
+
### Windows (64bit)
|
21 |
+
Open Powershell as **Administrator**
|
22 |
+
```powershell
|
23 |
+
irm https://raw.githubusercontent.com/shubhamakshit/pwdlv3_assets/main/dl.pwdlv3.ps1 | iex
|
24 |
+
```
|
25 |
+
|
26 |
+
### Linux (64bit)
|
27 |
+
```bash
|
28 |
+
git clone https://github.com/shubhamakshit/pwdlv3.git
|
29 |
+
cd pwdlv3
|
30 |
+
chmod +x ./setup.sh
|
31 |
+
./setup.sh -f # -f means ffmpeg ; (ffmpeg binary in PATH is given preference)
|
32 |
+
source ~/.bashrc
|
33 |
+
```
|
34 |
+
## Getting started
|
35 |
+
### Logging in
|
36 |
+
For *more* interactive login
|
37 |
+
```bash
|
38 |
+
pwdl --login
|
39 |
+
```
|
40 |
+
For *less* interactive login
|
41 |
+
**Tip** : Prepend the phone number by `wa` for whatsapp OTP.
|
42 |
+
```bash
|
43 |
+
pwdl --phone waXXXXXXXXXX # or pwdl --phone XXXXXXXXXX
|
44 |
+
```
|
45 |
+
### Starting webui
|
46 |
+
The webui as of May, 2025 has moved to [pwdl-webui.vercel.app](https://pwdl-webui.vercel.app). The backend however must be on your local machine.
|
47 |
+
```bash
|
48 |
+
pwdl --webui
|
49 |
+
```
|
50 |
+
1. Look for a url with a port number (default:**5000**)
|
51 |
+

|
52 |
+
|
53 |
+
2. Open that url. In most cases [localhost:5000](http://localhost:5000).
|
54 |
+

|
55 |
+
|
56 |
+
3. Open the Online WebUI. As of now [pwdl-webui.vercel.app](https://pwdl-webui.vercel.app).
|
57 |
+

|
58 |
+
|
59 |
+
1. Open Settings -> WebSettings Click on Edit and change the API value to the url obtained in 2.
|
60 |
+

|
61 |
+
2. Explore other values of Web Settings to get your desired value.
|
62 |
+
3. Open 'BOSS' (Library) and Enjoy
|
beta/Syncer/db_utils/DataObject.py
ADDED
@@ -0,0 +1,101 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from beta.Syncer.db_utils.Errors import Errors
|
2 |
+
from beta.Syncer.db_utils.Schema import Schema
|
3 |
+
|
4 |
+
|
5 |
+
class DbObject:
|
6 |
+
|
7 |
+
class IllegalDBTuple(Exception):
|
8 |
+
|
9 |
+
req_size = 2
|
10 |
+
|
11 |
+
def __init__(self, data):
|
12 |
+
self.data = data
|
13 |
+
super().__init__(f"Data must be a tuple, got {type(data)}")
|
14 |
+
|
15 |
+
def __str__(self):
|
16 |
+
if len(self.data) != self.req_size:
|
17 |
+
return f"Data must be a tuple of length 2, got {len(self.data)}"
|
18 |
+
return f"Data must be a tuple, got {type(self.data)}"
|
19 |
+
|
20 |
+
class AttrNotFound(Exception):
|
21 |
+
|
22 |
+
def __init__(self, key):
|
23 |
+
self.key = key
|
24 |
+
super().__init__(f"Attribute {key} not found")
|
25 |
+
|
26 |
+
def __str__(self):
|
27 |
+
return f"Attribute {self.key} not found"
|
28 |
+
|
29 |
+
class IncompleteData(Exception):
|
30 |
+
|
31 |
+
def __init__(self, key):
|
32 |
+
self.key = key
|
33 |
+
super().__init__(f"Key {key} is required")
|
34 |
+
|
35 |
+
def __str__(self):
|
36 |
+
return f"Key {self.key} is required"
|
37 |
+
|
38 |
+
|
39 |
+
|
40 |
+
|
41 |
+
|
42 |
+
|
43 |
+
def __init__(self, schema, add_id=True):
|
44 |
+
self.schema = Schema(schema) if isinstance(schema, dict) else schema
|
45 |
+
if add_id:
|
46 |
+
self.schema.add_id()
|
47 |
+
self._id = self.schema.schema["_id"]["function"]()
|
48 |
+
|
49 |
+
def __str__(self):
|
50 |
+
return str(self.compile())
|
51 |
+
|
52 |
+
def add(self, data):
|
53 |
+
if not isinstance(data, tuple):
|
54 |
+
raise DbObject.IllegalDBTuple(data)
|
55 |
+
|
56 |
+
key = data[0]
|
57 |
+
value = data[1]
|
58 |
+
|
59 |
+
if self.schema.validate(key, value):
|
60 |
+
setattr(self, key, value)
|
61 |
+
return self
|
62 |
+
|
63 |
+
def does_exist(self, key):
|
64 |
+
return hasattr(self, key)
|
65 |
+
|
66 |
+
def update(self,data):
|
67 |
+
if not isinstance(data, tuple):
|
68 |
+
raise DbObject.IllegalDBTuple(data)
|
69 |
+
|
70 |
+
key = data[0]
|
71 |
+
value = data[1]
|
72 |
+
|
73 |
+
if self.schema.validate(key, value):
|
74 |
+
if not self.does_exist(key):
|
75 |
+
raise DbObject.AttrNotFound(key)
|
76 |
+
self.add(data)
|
77 |
+
return self
|
78 |
+
|
79 |
+
def compile(self):
|
80 |
+
data = {}
|
81 |
+
for key, value in self.schema.schema.items():
|
82 |
+
if not self.does_exist(key):
|
83 |
+
if self.schema.is_required(key):
|
84 |
+
raise DbObject.IncompleteData(key)
|
85 |
+
else:
|
86 |
+
data[key] = getattr(self, key)
|
87 |
+
return data
|
88 |
+
|
89 |
+
def del_key(self, key):
|
90 |
+
if self.does_exist(key): delattr(self, key)
|
91 |
+
|
92 |
+
def req_keys_more(self):
|
93 |
+
return self.keys_more(req=True)
|
94 |
+
|
95 |
+
def keys_more(self,req=False):
|
96 |
+
inc = []
|
97 |
+
for key in self.schema.get_keys():
|
98 |
+
#print(f"Debug: does_exist({key}) = {self.does_exist(key)}")
|
99 |
+
if self.schema.is_required(key) and (req and not self.does_exist(key)):
|
100 |
+
inc.append(key)
|
101 |
+
return inc
|
beta/Syncer/db_utils/Database.py
ADDED
@@ -0,0 +1,77 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from pymongo import MongoClient
|
2 |
+
from pymongo.server_api import ServerApi
|
3 |
+
|
4 |
+
from beta.Syncer.db_utils.DataObject import DbObject
|
5 |
+
from beta.Syncer.db_utils.Schema import Schema
|
6 |
+
|
7 |
+
|
8 |
+
class DB:
|
9 |
+
|
10 |
+
|
11 |
+
def __init__(self,uri):
|
12 |
+
import dns.resolver
|
13 |
+
dns.resolver.default_resolver=dns.resolver.Resolver(configure=False)
|
14 |
+
dns.resolver.default_resolver.nameservers=['8.8.8.8']
|
15 |
+
self.uri = uri
|
16 |
+
self.client = MongoClient(uri, server_api=ServerApi('1'))
|
17 |
+
self.db = self.client["OPCluster"]
|
18 |
+
self.collection = self.db["OPCluster"]
|
19 |
+
|
20 |
+
def set_db(self,db):
|
21 |
+
self.db = self.client[db]
|
22 |
+
return self
|
23 |
+
|
24 |
+
def set_collection(self,collection):
|
25 |
+
self.collection = self.db[collection]
|
26 |
+
return self
|
27 |
+
|
28 |
+
def insert(self,db_obj:DbObject):
|
29 |
+
return self.collection.insert_one(db_obj.compile())
|
30 |
+
|
31 |
+
def find(self,query):
|
32 |
+
return self.collection.find(query)
|
33 |
+
|
34 |
+
def list_all(self):
|
35 |
+
import json
|
36 |
+
for i in self.collection.find():
|
37 |
+
print(json.dumps(i, indent=4))
|
38 |
+
|
39 |
+
def get_object(self,query):
|
40 |
+
data = self.collection.find_one(query)
|
41 |
+
|
42 |
+
if data:
|
43 |
+
obj = DbObject(Schema.gen_schema(data))
|
44 |
+
for key, value in data.items():
|
45 |
+
obj.add((key, value))
|
46 |
+
else:
|
47 |
+
obj = DbObject(Schema.gen_schema(data), False)
|
48 |
+
return obj
|
49 |
+
|
50 |
+
def get_objects(self,query,limit:int = 0):
|
51 |
+
data = self.collection.find(query).limit(limit)
|
52 |
+
objs = []
|
53 |
+
for i in data:
|
54 |
+
obj = DbObject(Schema.gen_schema(i))
|
55 |
+
for key, value in i.items():
|
56 |
+
obj.add((key, value))
|
57 |
+
objs.append(obj)
|
58 |
+
return objs
|
59 |
+
|
60 |
+
|
61 |
+
|
62 |
+
def delete(self,query):
|
63 |
+
return self.collection.delete_one(query)
|
64 |
+
|
65 |
+
def delete_many(self,query):
|
66 |
+
return self.collection.delete_many(query)
|
67 |
+
|
68 |
+
def update(self,query,update):
|
69 |
+
return self.collection.update_one(query,update)
|
70 |
+
|
71 |
+
def update_many(self,query,update):
|
72 |
+
return self.collection.update_many(query,update)
|
73 |
+
|
74 |
+
def ping(self):
|
75 |
+
return self.client.server_info()
|
76 |
+
|
77 |
+
|
beta/Syncer/db_utils/Errors.py
ADDED
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
class Errors:
|
2 |
+
|
3 |
+
|
4 |
+
|
5 |
+
|
6 |
+
class IllegalType(Exception):
|
7 |
+
|
8 |
+
def __init__(self,data_type,key,object={}):
|
9 |
+
self.data_type = data_type
|
10 |
+
self.key = key
|
11 |
+
self.object = object
|
12 |
+
super().__init__(f"Expected {data_type} for key {key} in object {object}")
|
13 |
+
|
14 |
+
class MissingKey(Exception):
|
15 |
+
|
16 |
+
def __init__(self,key,object={}):
|
17 |
+
self.key = key
|
18 |
+
self.object = object
|
19 |
+
|
20 |
+
def __str__(self):
|
21 |
+
return f"Missing key {self.key} in object {self.object}"
|
beta/Syncer/db_utils/Schema.py
ADDED
@@ -0,0 +1,100 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from beta.Syncer.db_utils.Errors import Errors
|
2 |
+
import uuid
|
3 |
+
|
4 |
+
class Schema:
|
5 |
+
class InvalidSchema(Exception):
|
6 |
+
def __init__(self, schema, key, value, type, required=False):
|
7 |
+
self.schema = schema
|
8 |
+
self.key = key
|
9 |
+
self.value = value
|
10 |
+
self.type = type
|
11 |
+
self.required = required
|
12 |
+
super().__init__(f"Invalid schema {schema}")
|
13 |
+
|
14 |
+
def __str__(self):
|
15 |
+
if self.required:
|
16 |
+
return f"Invalid schema {self.schema} for key {self.key} with value {self.value} of type {type(self.type)} is required"
|
17 |
+
return f"Invalid schema {self.schema} for key {self.key} with value {self.value} of type {type(self.value)} with expected type {self.type}"
|
18 |
+
|
19 |
+
class InvalidIdFunction (Exception):
|
20 |
+
def __init__(self, schema):
|
21 |
+
self.schema = schema
|
22 |
+
super().__init__(f"Invalid id function for schema {schema}")
|
23 |
+
|
24 |
+
def __str__(self):
|
25 |
+
return f"Invalid id function for schema {self.schema}"
|
26 |
+
|
27 |
+
test_schema = {
|
28 |
+
"key": {
|
29 |
+
"type": str,
|
30 |
+
"required": True
|
31 |
+
}
|
32 |
+
}
|
33 |
+
|
34 |
+
def __init__(self, schema):
|
35 |
+
self.schema = schema
|
36 |
+
|
37 |
+
def has_id(self):
|
38 |
+
return "_id" in self.schema
|
39 |
+
|
40 |
+
def generate_id(self):
|
41 |
+
return str(uuid.uuid4())
|
42 |
+
|
43 |
+
def add_id(self):
|
44 |
+
|
45 |
+
if not self.has_id():
|
46 |
+
self.schema["_id"] = {
|
47 |
+
"type": str,
|
48 |
+
"required": True,
|
49 |
+
"function": self.generate_id
|
50 |
+
}
|
51 |
+
if not "function" in self.schema["_id"]:
|
52 |
+
self.schema["_id"]["function"] = self.generate_id
|
53 |
+
|
54 |
+
if not callable(self.schema["_id"]["function"]):
|
55 |
+
raise Schema.InvalidIdFunction(self.schema)
|
56 |
+
|
57 |
+
|
58 |
+
|
59 |
+
|
60 |
+
|
61 |
+
def validate_schema(self, data):
|
62 |
+
for key, value in self.schema.items():
|
63 |
+
if key not in data:
|
64 |
+
if not "required" in value or value["required"]:
|
65 |
+
raise Schema.InvalidSchema(self.schema, key, None, value["type"], True)
|
66 |
+
else:
|
67 |
+
if not isinstance(data[key], value["type"]):
|
68 |
+
raise Schema.InvalidSchema(self.schema, key, data[key], value["type"])
|
69 |
+
return True
|
70 |
+
|
71 |
+
def validate(self, key, value):
|
72 |
+
if key not in self.schema:
|
73 |
+
raise Errors.MissingKey(key, self.schema)
|
74 |
+
if not isinstance(value, self.schema[key]["type"]):
|
75 |
+
raise Errors.IllegalType(self.schema[key]["type"], key, value)
|
76 |
+
return True
|
77 |
+
|
78 |
+
|
79 |
+
def get_type(self, key):
|
80 |
+
if key not in self.schema:
|
81 |
+
raise Errors.MissingKey(key, self.schema)
|
82 |
+
return self.schema[key]["type"]
|
83 |
+
|
84 |
+
def is_required(self, key):
|
85 |
+
if key not in self.schema:
|
86 |
+
raise Errors.MissingKey(key, self.schema)
|
87 |
+
return (not "required" in self.schema[key]) or (self.schema[key]["required"])
|
88 |
+
|
89 |
+
def get_keys(self):
|
90 |
+
return [key for key in self.schema]
|
91 |
+
|
92 |
+
@staticmethod
|
93 |
+
def gen_schema(data):
|
94 |
+
schema = {}
|
95 |
+
if data:
|
96 |
+
for key, value in data.items():
|
97 |
+
schema[key] = {
|
98 |
+
"type": type(value)
|
99 |
+
}
|
100 |
+
return schema
|
beta/Syncer/main.py
ADDED
@@ -0,0 +1,112 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import random
|
2 |
+
|
3 |
+
from beta.Syncer.db_utils.DataObject import DbObject
|
4 |
+
from beta.Syncer.db_utils.Database import DB
|
5 |
+
from beta.update import UpdateJSONFile
|
6 |
+
from mainLogic.error import debugger
|
7 |
+
from mainLogic.utils import glv_var
|
8 |
+
|
9 |
+
user_schema = {
|
10 |
+
"_id":{
|
11 |
+
"type": str,
|
12 |
+
"required": True,
|
13 |
+
},
|
14 |
+
"token":{
|
15 |
+
"type": dict,
|
16 |
+
"required": True,
|
17 |
+
},
|
18 |
+
"user_update_index":{
|
19 |
+
"type": int,
|
20 |
+
"required": True,
|
21 |
+
}
|
22 |
+
}
|
23 |
+
|
24 |
+
class Syncer:
|
25 |
+
def __init__(self):
|
26 |
+
self.url = "mongodb+srv://tshonqkhan:[email protected]/?retryWrites=true&w=majority&appName=Eminem"
|
27 |
+
self.database = DB(self.url).set_db("pwdlv3").set_collection("users")
|
28 |
+
self.updater = UpdateJSONFile(glv_var.PREFS_FILE)
|
29 |
+
self.user_id = self.updater.data.get("user_id", "")
|
30 |
+
self.user_update_index = self.updater.data.get("user_update_index", 0)
|
31 |
+
|
32 |
+
# Initialize sync on creation
|
33 |
+
self.sync()
|
34 |
+
|
35 |
+
def sync(self):
|
36 |
+
# get user id from prefs
|
37 |
+
user_id = self.user_id
|
38 |
+
if not user_id:
|
39 |
+
debugger.error("No user ID found in preferences")
|
40 |
+
return
|
41 |
+
|
42 |
+
debugger.debug(f"User id: {user_id}")
|
43 |
+
|
44 |
+
# check if user exists
|
45 |
+
user = self.database.get_object({"_id": user_id}).compile()
|
46 |
+
|
47 |
+
if user:
|
48 |
+
debugger.debug(f"User found: {user}")
|
49 |
+
|
50 |
+
# Get update indices
|
51 |
+
local_update_index = self.user_update_index
|
52 |
+
db_update_index = user.get("user_update_index", 0)
|
53 |
+
|
54 |
+
# Check if local index is -1 (force update)
|
55 |
+
if local_update_index == -1:
|
56 |
+
debugger.debug("Force update requested")
|
57 |
+
# Update local token with DB token
|
58 |
+
self.updater.update("token", user.get("token", {}))
|
59 |
+
# Reset local update index to match DB
|
60 |
+
self.updater.update("user_update_index", db_update_index)
|
61 |
+
debugger.debug(f"Token forcefully updated from DB. Update index reset to {db_update_index}")
|
62 |
+
|
63 |
+
# Check if local index is less than DB index (DB has newer data)
|
64 |
+
elif local_update_index < db_update_index:
|
65 |
+
debugger.debug(f"Local update index ({local_update_index}) is less than DB index ({db_update_index})")
|
66 |
+
# Update local token with DB token
|
67 |
+
self.updater.update("token", user.get("token", {}))
|
68 |
+
# Update local update index to match DB
|
69 |
+
self.updater.update("user_update_index", db_update_index)
|
70 |
+
debugger.debug("Token updated from DB")
|
71 |
+
|
72 |
+
# Check if local index is greater than DB index (local has newer data)
|
73 |
+
elif local_update_index > db_update_index:
|
74 |
+
debugger.debug(f"Local update index ({local_update_index}) is greater than DB index ({db_update_index})")
|
75 |
+
# Update DB token with local token
|
76 |
+
self.update_token(user_id, self.updater.data.get("token", {}), local_update_index)
|
77 |
+
|
78 |
+
else:
|
79 |
+
debugger.debug(f"Update indices match ({local_update_index}). No sync needed.")
|
80 |
+
else:
|
81 |
+
debugger.debug("User not found")
|
82 |
+
# create new user with initial update index of 0
|
83 |
+
user = DbObject(user_schema) \
|
84 |
+
.add(("_id", user_id)) \
|
85 |
+
.add(("token", {})) \
|
86 |
+
.add(("user_update_index", 0))
|
87 |
+
self.database.insert(user)
|
88 |
+
debugger.debug(f"User created: {user}")
|
89 |
+
debugger.error(f"Since user was not found, a new user was created. Please restart the app to sync the token.")
|
90 |
+
|
91 |
+
def update_token(self, user_id, token, update_index=None):
|
92 |
+
# If no update index provided, increment the current one
|
93 |
+
if update_index is None:
|
94 |
+
update_index = self.user_update_index + 1
|
95 |
+
|
96 |
+
# Update token in database
|
97 |
+
self.database.update({
|
98 |
+
"_id": user_id
|
99 |
+
}, {
|
100 |
+
"$set": {
|
101 |
+
"token": token,
|
102 |
+
"user_update_index": update_index
|
103 |
+
}
|
104 |
+
})
|
105 |
+
|
106 |
+
# Update local preferences
|
107 |
+
self.updater.update("token", token)
|
108 |
+
self.updater.update("user_update_index", update_index)
|
109 |
+
|
110 |
+
debugger.debug(f"Token updated in DB with update index: {update_index}")
|
111 |
+
debugger.error(f"Token updated. Please restart the app to sync the token.")
|
112 |
+
|
beta/api/api.py
ADDED
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
from flask import Flask, request
|
3 |
+
from flask_cors import CORS
|
4 |
+
from beta.api.blueprints.admin_routes import admin
|
5 |
+
from beta.api.blueprints.api_pref_manager import api_prefs
|
6 |
+
from beta.api.blueprints.client_info_routes import client_info
|
7 |
+
from beta.api.blueprints.leagacy_create_task import legacy_create_task
|
8 |
+
from beta.api.blueprints.login import login
|
9 |
+
from beta.api.blueprints.scarper import scraper_blueprint
|
10 |
+
from beta.api.blueprints.session_lodge import session_lodge
|
11 |
+
from beta.api.blueprints.template_routes import template_blueprint
|
12 |
+
from beta.api.blueprints.while_dl_and_post_dl import dl_and_post_dl
|
13 |
+
from beta.api.mr_manager.boss_manager import Boss
|
14 |
+
from mainLogic.utils.glv import Global
|
15 |
+
from mainLogic.utils.glv_var import debugger
|
16 |
+
|
17 |
+
app = Flask(__name__)
|
18 |
+
CORS(app)
|
19 |
+
|
20 |
+
# Initialize ClientManager and TaskManager
|
21 |
+
client_manager = Boss.client_manager
|
22 |
+
task_manager = Boss.task_manager
|
23 |
+
OUT_DIR = Boss.OUT_DIR
|
24 |
+
|
25 |
+
try:
|
26 |
+
if not os.path.exists(OUT_DIR):
|
27 |
+
os.makedirs(OUT_DIR)
|
28 |
+
except Exception as e:
|
29 |
+
debugger.error(f"Could not create output directory {OUT_DIR}")
|
30 |
+
debugger.success(f"Defaulting to './' ")
|
31 |
+
debugger.error(f"Error: {e}")
|
32 |
+
OUT_DIR = './'
|
33 |
+
|
34 |
+
app.register_blueprint(api_prefs)
|
35 |
+
app.register_blueprint(legacy_create_task)
|
36 |
+
app.register_blueprint(template_blueprint)
|
37 |
+
app.register_blueprint(session_lodge)
|
38 |
+
app.register_blueprint(dl_and_post_dl)
|
39 |
+
app.register_blueprint(client_info)
|
40 |
+
app.register_blueprint(admin)
|
41 |
+
app.register_blueprint(scraper_blueprint)
|
42 |
+
app.register_blueprint(login)
|
43 |
+
|
44 |
+
@app.before_request
|
45 |
+
def before_request_function():
|
46 |
+
# Only run syncer if the request is for scraper blueprint routes
|
47 |
+
if request.endpoint and request.endpoint.startswith('scraper_blueprint'):
|
48 |
+
try:
|
49 |
+
from beta.Syncer.main import Syncer
|
50 |
+
sync = Syncer()
|
51 |
+
except Exception as e:
|
52 |
+
debugger.error(f"Error in Syncer: {e}")
|
53 |
+
|
54 |
+
if __name__ == '__main__':
|
55 |
+
app.run(debug=True, port=7680)
|
beta/api/api_dl.py
ADDED
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
from mainLogic.utils.gen_utils import delete_old_files
|
3 |
+
from mainLogic.main import Main
|
4 |
+
from mainLogic.startup.checkup import CheckState
|
5 |
+
from mainLogic.utils.glv import Global
|
6 |
+
from mainLogic.utils import glv_var
|
7 |
+
from mainLogic.utils.glv_var import debugger
|
8 |
+
|
9 |
+
|
10 |
+
def download_pw_video(task_id, name, id, batch_name, topic_name, lecture_url, out_dir, client_id, session_id,
|
11 |
+
progress_callback):
|
12 |
+
# Create directories for client_id and session_id if they don't exist
|
13 |
+
client_session_dir = os.path.join(out_dir, client_id, session_id)
|
14 |
+
os.makedirs(client_session_dir, exist_ok=True)
|
15 |
+
|
16 |
+
debugger.info(f"Downloading {name} with id {id} to {client_session_dir}")
|
17 |
+
|
18 |
+
ch = CheckState()
|
19 |
+
|
20 |
+
state = ch.checkup(glv_var.EXECUTABLES, directory="./", verbose=False, do_raise=True)
|
21 |
+
prefs = state['prefs']
|
22 |
+
|
23 |
+
# from mainLogic.utils.dependency_checker import check_dependencies
|
24 |
+
|
25 |
+
if 'webui-del-time' in prefs:
|
26 |
+
del_time = int(prefs['webui-del-time'])
|
27 |
+
else:
|
28 |
+
del_time = 45
|
29 |
+
|
30 |
+
delete_old_files(glv_var.api_webdl_directory, del_time)
|
31 |
+
|
32 |
+
# vsd = state['vsd']
|
33 |
+
ffmpeg = state['ffmpeg']
|
34 |
+
|
35 |
+
mp4d = state['mp4decrypt']
|
36 |
+
|
37 |
+
try:
|
38 |
+
|
39 |
+
Main(id=id,
|
40 |
+
name=f"{name}-{task_id}",
|
41 |
+
token=prefs['token'],
|
42 |
+
batch_name=batch_name,
|
43 |
+
topic_name=topic_name,
|
44 |
+
lecture_url=lecture_url,
|
45 |
+
random_id=prefs['random_id'],
|
46 |
+
tui=False,
|
47 |
+
directory=client_session_dir, tmpDir="/*auto*/", ffmpeg=ffmpeg, mp4d=mp4d, verbose=False,
|
48 |
+
progress_callback=progress_callback).process()
|
49 |
+
except TypeError as e:
|
50 |
+
raise Exception(f"Invalid ID: {e}")
|
51 |
+
except Exception as e:
|
52 |
+
raise Exception(f"An error occurred while processing the video: {e}")
|
beta/api/blueprints/admin_routes.py
ADDED
@@ -0,0 +1,157 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os.path
|
2 |
+
|
3 |
+
from flask import Blueprint, request, jsonify, send_file
|
4 |
+
|
5 |
+
from beta.api.mr_manager.boss_manager import Boss
|
6 |
+
from beta.update import UpdateJSONFile
|
7 |
+
from mainLogic.error import TokenInvalid
|
8 |
+
from mainLogic.startup.checkup import CheckState
|
9 |
+
from mainLogic.utils import glv_var
|
10 |
+
from mainLogic.utils.dependency_checker import re_check_dependencies
|
11 |
+
from mainLogic.utils.glv import Global
|
12 |
+
from mainLogic.utils.glv_var import PREFS_FILE, debugger
|
13 |
+
from mainLogic.utils.os2 import SysFunc
|
14 |
+
from updater import check_for_updates, pull_latest_changes as pull
|
15 |
+
|
16 |
+
client_manager = Boss.client_manager
|
17 |
+
task_manager = Boss.task_manager
|
18 |
+
|
19 |
+
admin = Blueprint('admin', __name__)
|
20 |
+
|
21 |
+
|
22 |
+
@admin.route('/api/webdl')
|
23 |
+
@admin.route('/webdl')
|
24 |
+
def webdl():
|
25 |
+
return jsonify(SysFunc.list_files_and_folders(Boss.OUT_DIR)), 200
|
26 |
+
|
27 |
+
|
28 |
+
@admin.route('/api/webdl/webdl')
|
29 |
+
@admin.route('/webdl/webdl')
|
30 |
+
def webdl_copy():
|
31 |
+
return webdl()
|
32 |
+
|
33 |
+
|
34 |
+
@admin.route('/api/webdl/<path:subpath>')
|
35 |
+
@admin.route('/webdl/<path:subpath>')
|
36 |
+
def webdl_subpath(subpath):
|
37 |
+
return jsonify(SysFunc.list_files_and_folders(os.path.join(Boss.OUT_DIR, subpath))), 200
|
38 |
+
|
39 |
+
|
40 |
+
@admin.route('/api/delete/<path:subpath>')
|
41 |
+
@admin.route('/delete/<path:subpath>')
|
42 |
+
def delete_subpath(subpath):
|
43 |
+
path_file_or_folder = os.path.join(Boss.OUT_DIR, subpath)
|
44 |
+
|
45 |
+
if os.path.exists(path_file_or_folder):
|
46 |
+
try:
|
47 |
+
SysFunc.delete_file_or_folder(path_file_or_folder)
|
48 |
+
return jsonify({'success': f'{subpath} deleted'}), 200
|
49 |
+
except Exception as e:
|
50 |
+
return jsonify({'error': f"Could not delete {e}"}), 404
|
51 |
+
else:
|
52 |
+
return jsonify({'error': 'file not found'}), 404
|
53 |
+
|
54 |
+
|
55 |
+
@admin.route('/api/get/<path:subpath>')
|
56 |
+
@admin.route('/get/<path:subpath>')
|
57 |
+
def get_subpath(subpath):
|
58 |
+
path_to_file = os.path.join(Boss.OUT_DIR, subpath)
|
59 |
+
if os.path.exists(path_to_file):
|
60 |
+
return send_file(path_to_file, as_attachment=True, download_name=os.path.basename(path_to_file))
|
61 |
+
else:
|
62 |
+
return jsonify({'error': 'file not found'}), 404
|
63 |
+
|
64 |
+
|
65 |
+
@admin.route('/api/server/usages')
|
66 |
+
@admin.route('/server/usages')
|
67 |
+
def get_usages_for_all_client():
|
68 |
+
usages = {}
|
69 |
+
# will store usage in form of {client_id: size}
|
70 |
+
|
71 |
+
for client_id in client_manager.clients:
|
72 |
+
usages[client_id] = int(SysFunc.get_size_in_mB(os.path.join(Boss.OUT_DIR, client_id)))
|
73 |
+
|
74 |
+
return jsonify(usages), 200
|
75 |
+
|
76 |
+
|
77 |
+
@admin.route('/api/server/update', methods=['GET', 'POST'])
|
78 |
+
@admin.route('/server/update', methods=['GET', 'POST'])
|
79 |
+
def update_server():
|
80 |
+
try:
|
81 |
+
if request.method == 'POST':
|
82 |
+
if check_for_updates():
|
83 |
+
code, out = pull()
|
84 |
+
if code == 0:
|
85 |
+
return jsonify({'success': 'Updated!'}), 200
|
86 |
+
else:
|
87 |
+
return jsonify({'error': 'Error occurred while pulling the latest changes. Exiting...'}), 500
|
88 |
+
else:
|
89 |
+
return jsonify({'message': 'No updates found.'}), 200
|
90 |
+
else:
|
91 |
+
update = check_for_updates()
|
92 |
+
return jsonify({'update_available': update}), 200
|
93 |
+
except FileNotFoundError as fnf_error:
|
94 |
+
error_message = f"File not found error: {str(fnf_error)}"
|
95 |
+
return jsonify({'error': error_message}), 500
|
96 |
+
except Exception as e:
|
97 |
+
error_message = f"Unexpected error: {str(e)}"
|
98 |
+
return jsonify({'error': error_message}), 500
|
99 |
+
|
100 |
+
|
101 |
+
@admin.route('/api/server/update/latest')
|
102 |
+
@admin.route('/server/update/latest')
|
103 |
+
def get_latest_origin_hash():
|
104 |
+
from updater import get_latest_origin_hash, get_info_by_commit_hash
|
105 |
+
return jsonify(get_info_by_commit_hash(get_latest_origin_hash())), 200
|
106 |
+
|
107 |
+
|
108 |
+
@admin.route('/api/check_token')
|
109 |
+
@admin.route('/check_token')
|
110 |
+
def check_token():
|
111 |
+
ch = CheckState()
|
112 |
+
# reload preferences
|
113 |
+
# from mainLogic.utils.dependency_checker import EXECUTABLES, check_dependencies
|
114 |
+
|
115 |
+
try:
|
116 |
+
|
117 |
+
# imporoper method to reload preferences
|
118 |
+
# state, prefs = check_dependencies(glv_var.vars['prefs'].get('dir',{}), verbose=False, do_raise=True)
|
119 |
+
# glv_var.vars['prefs'] = prefs
|
120 |
+
|
121 |
+
# original method (still improper)
|
122 |
+
state, prefs = re_check_dependencies()
|
123 |
+
|
124 |
+
print(glv_var.vars['prefs'].get('token',{}))
|
125 |
+
|
126 |
+
except Exception as e:
|
127 |
+
return jsonify({'error': f"Error: {e}"}), 500
|
128 |
+
|
129 |
+
if 'token' in prefs:
|
130 |
+
token = prefs['token']
|
131 |
+
else:
|
132 |
+
return jsonify({'error': 'Token not found'}), 404
|
133 |
+
|
134 |
+
if 'random_id' in prefs:
|
135 |
+
random_id = prefs['random_id']
|
136 |
+
else:
|
137 |
+
return jsonify({'error': 'Random ID not found'}), 404
|
138 |
+
try:
|
139 |
+
if ch.check_token(token, random_id):
|
140 |
+
return jsonify({'success': 'Token is valid'}), 200
|
141 |
+
except TokenInvalid:
|
142 |
+
return jsonify({'error': 'Token is invalid'}), 404
|
143 |
+
|
144 |
+
|
145 |
+
@admin.route('/api/change_to_old_token_scheme')
|
146 |
+
@admin.route('/change_to_old_token_scheme')
|
147 |
+
def change_to_old_token_scheme():
|
148 |
+
UpdateJSONFile(PREFS_FILE).update('token', "")
|
149 |
+
try:
|
150 |
+
re_check_dependencies()
|
151 |
+
|
152 |
+
debugger.success(f"Changed token scheme to old")
|
153 |
+
debugger.error(f"Current token: {glv_var.vars['prefs']['token']}")
|
154 |
+
|
155 |
+
except Exception as e:
|
156 |
+
return jsonify({'error': f"Error: {e}"}), 500
|
157 |
+
return jsonify({'success': 'Token scheme changed to old'}), 200
|
beta/api/blueprints/api_pref_manager.py
ADDED
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
|
3 |
+
from flask import Blueprint, request, jsonify
|
4 |
+
|
5 |
+
from mainLogic.utils import glv_var
|
6 |
+
from mainLogic.utils.glv import Global
|
7 |
+
from mainLogic.utils.glv_var import PREFS_FILE
|
8 |
+
|
9 |
+
api_prefs = Blueprint('api_prefs', __name__)
|
10 |
+
|
11 |
+
@api_prefs.route('/api/prefs/defaults.json', methods=['GET'])
|
12 |
+
@api_prefs.route('/prefs/defaults.json', methods=['GET'])
|
13 |
+
def get_prefs():
|
14 |
+
import json as js
|
15 |
+
file_path = PREFS_FILE
|
16 |
+
if not os.path.exists(file_path):
|
17 |
+
return jsonify({'error': 'file not found'}), 404
|
18 |
+
with open(file_path, 'r') as file:
|
19 |
+
data = js.load(file)
|
20 |
+
return jsonify(data), 200
|
21 |
+
|
22 |
+
|
23 |
+
@api_prefs.route('/api/update/defaults.json', methods=['POST'])
|
24 |
+
@api_prefs.route('/update/defaults.json', methods=['POST'])
|
25 |
+
def update_prefs():
|
26 |
+
import json as js
|
27 |
+
file_path = PREFS_FILE
|
28 |
+
if not os.path.exists(file_path):
|
29 |
+
return jsonify({'error': 'file not found'}), 404
|
30 |
+
try:
|
31 |
+
data = request.json
|
32 |
+
except:
|
33 |
+
return jsonify({'error': 'Invalid JSON'}), 400
|
34 |
+
with open(file_path, 'r') as file:
|
35 |
+
data = js.load(file)
|
36 |
+
data.update(request.json)
|
37 |
+
with open(file_path, 'w') as file:
|
38 |
+
js.dump(data, file, indent=4)
|
39 |
+
|
40 |
+
## recheck dependencies
|
41 |
+
from mainLogic.utils.dependency_checker import re_check_dependencies
|
42 |
+
re_check_dependencies()
|
43 |
+
|
44 |
+
|
45 |
+
return jsonify(data), 200
|
46 |
+
|
beta/api/blueprints/client_info_routes.py
ADDED
@@ -0,0 +1,149 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import json
|
2 |
+
|
3 |
+
from flask import Blueprint, request, jsonify
|
4 |
+
from beta.api.mr_manager.boss_manager import Boss
|
5 |
+
from mainLogic.big4.Ravenclaw_decrypt.key import LicenseKeyFetcher
|
6 |
+
|
7 |
+
client_manager = Boss.client_manager
|
8 |
+
task_manager = Boss.task_manager
|
9 |
+
OUT_DIR = Boss.OUT_DIR
|
10 |
+
|
11 |
+
client_info = Blueprint('client_info', __name__)
|
12 |
+
|
13 |
+
|
14 |
+
@client_info.route('/api/session/<client_id>/<session_id>', methods=['GET'])
|
15 |
+
@client_info.route('/session/<client_id>/<session_id>', methods=['GET'])
|
16 |
+
def get_session(client_id, session_id):
|
17 |
+
# if client_id == 'anonymous' or session_id == 'anonymous':
|
18 |
+
# return jsonify({'error': 'Access to anonymous sessions is not allowed'}), 403
|
19 |
+
|
20 |
+
client_info = client_manager.get_client_info(client_id)
|
21 |
+
if client_info and session_id in client_info['sessions']:
|
22 |
+
session_info = client_info['sessions'][session_id]
|
23 |
+
return jsonify(session_info), 200
|
24 |
+
else:
|
25 |
+
return jsonify({'error': 'Session not found'}), 404
|
26 |
+
|
27 |
+
|
28 |
+
@client_info.route('/api/client/<client_id>', methods=['GET'])
|
29 |
+
@client_info.route('/client/<client_id>', methods=['GET'])
|
30 |
+
def get_client(client_id):
|
31 |
+
client_info = client_manager.get_client_info(client_id)
|
32 |
+
if client_info:
|
33 |
+
# sorting sessions by timestamp
|
34 |
+
from datetime import datetime
|
35 |
+
|
36 |
+
# Assuming client_info['sessions'] is a dictionary
|
37 |
+
client_info['sessions'] = dict(sorted(client_info['sessions'].items(),
|
38 |
+
key=lambda x: datetime.strptime(x[1]['timestamp'], '%Y-%m-%d %H:%M:%S'),
|
39 |
+
reverse=True))
|
40 |
+
print(json.dumps([client_info["sessions"][x]["timestamp"] for x in client_info["sessions"]], indent=4))
|
41 |
+
|
42 |
+
return jsonify(client_info), 200
|
43 |
+
else:
|
44 |
+
return jsonify({'error': 'Client not found'}), 404
|
45 |
+
|
46 |
+
|
47 |
+
@client_info.route('/api/session/<client_id>/<session_id>/active', methods=['GET'])
|
48 |
+
@client_info.route('/session/<client_id>/<session_id>/active', methods=['GET'])
|
49 |
+
def check_session_active(client_id, session_id):
|
50 |
+
if client_id == 'anonymous' or session_id == 'anonymous':
|
51 |
+
return jsonify({'error': 'Access to anonymous sessions is not allowed'}), 403
|
52 |
+
|
53 |
+
client_info = client_manager.get_client_info(client_id)
|
54 |
+
if client_info and session_id in client_info['sessions']:
|
55 |
+
session_info = client_info['sessions'][session_id]
|
56 |
+
tasks = session_info['tasks']
|
57 |
+
for task_id in tasks:
|
58 |
+
status = tasks[task_id]['status']
|
59 |
+
if status == 'running':
|
60 |
+
return jsonify({'active': True}), 200
|
61 |
+
return jsonify({'active': False}), 200
|
62 |
+
else:
|
63 |
+
return jsonify({'error': 'Session not found'}), 404
|
64 |
+
|
65 |
+
|
66 |
+
def is_session_active(client_id, session_id):
|
67 |
+
if client_id == 'anonymous' or session_id == 'anonymous':
|
68 |
+
return {'error': 'Access to anonymous sessions is not allowed'}, 403
|
69 |
+
|
70 |
+
client_info = client_manager.get_client_info(client_id)
|
71 |
+
if client_info and session_id in client_info['sessions']:
|
72 |
+
session_info = client_info['sessions'][session_id]
|
73 |
+
tasks = session_info['tasks']
|
74 |
+
for task_id, task in tasks.items():
|
75 |
+
if task['status'] == 'running':
|
76 |
+
return {'active': True}, 200
|
77 |
+
return {'active': False}, 200
|
78 |
+
else:
|
79 |
+
return {'error': 'Session not found'}, 404
|
80 |
+
|
81 |
+
|
82 |
+
@client_info.route('/api/client/<client_id>/active_sessions', methods=['GET'])
|
83 |
+
@client_info.route('/client/<client_id>/active_sessions', methods=['GET'])
|
84 |
+
def get_active_sessions(client_id):
|
85 |
+
if client_id == 'anonymous':
|
86 |
+
return jsonify({'error': 'Access to anonymous client is not allowed'}), 403
|
87 |
+
|
88 |
+
client_info = client_manager.get_client_info(client_id)
|
89 |
+
|
90 |
+
if client_info:
|
91 |
+
active_sessions = []
|
92 |
+
for session_id in client_info['sessions']:
|
93 |
+
session_data = is_session_active(client_id, session_id)
|
94 |
+
if session_data[1] != 200:
|
95 |
+
return jsonify(session_data[0]), session_data[1]
|
96 |
+
active = session_data[0].get('active', False)
|
97 |
+
|
98 |
+
if active:
|
99 |
+
active_sessions.append(session_id)
|
100 |
+
return jsonify({"active_sessions": active_sessions}), 200
|
101 |
+
return jsonify({'error': 'Client not found'}), 404
|
102 |
+
|
103 |
+
|
104 |
+
@client_info.route('/api/key/vid_id', methods=['GET'])
|
105 |
+
@client_info.route('/key/vid_id', methods=['GET'])
|
106 |
+
def get_key():
|
107 |
+
vid_id = request.args.get('vid_id')
|
108 |
+
token = request.args.get('token')
|
109 |
+
if not vid_id or not token:
|
110 |
+
return jsonify({'error': 'vid_id and token are required'}), 400
|
111 |
+
fetcher = LicenseKeyFetcher(token)
|
112 |
+
key = fetcher.get_key(vid_id)
|
113 |
+
return jsonify({'key': key}), 200
|
114 |
+
|
115 |
+
|
116 |
+
@client_info.route('/api/random/name')
|
117 |
+
@client_info.route('/random/name')
|
118 |
+
def random_name():
|
119 |
+
from mainLogic.utils.gen_utils import generate_random_word
|
120 |
+
return jsonify({'name': generate_random_word()}), 200
|
121 |
+
|
122 |
+
|
123 |
+
@client_info.route('/api/client/names')
|
124 |
+
@client_info.route('/client/names')
|
125 |
+
def client_names():
|
126 |
+
clients = client_manager.clients
|
127 |
+
data = {}
|
128 |
+
for client_id in clients:
|
129 |
+
data[client_id] = clients[client_id]['name']
|
130 |
+
|
131 |
+
return jsonify(data), 200
|
132 |
+
|
133 |
+
|
134 |
+
@client_info.route('/api/client/<client_id>/names')
|
135 |
+
@client_info.route('/client/<client_id>/names')
|
136 |
+
def session_names(client_id):
|
137 |
+
if client_id == 'anonymous':
|
138 |
+
return jsonify({'error': 'Access to anonymous client is not allowed'}), 403
|
139 |
+
|
140 |
+
client = client_manager.get_client_info(client_id)
|
141 |
+
if not client:
|
142 |
+
return jsonify({'error': 'Client not found'}), 404
|
143 |
+
|
144 |
+
names = {}
|
145 |
+
|
146 |
+
for session in client['sessions']:
|
147 |
+
names[session] = client['sessions'][session]['name']
|
148 |
+
|
149 |
+
return jsonify(names), 200
|
beta/api/blueprints/leagacy_create_task.py
ADDED
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from flask import Blueprint, request, jsonify
|
2 |
+
from beta.api.api_dl import download_pw_video
|
3 |
+
from beta.api.mr_manager.boss_manager import Boss
|
4 |
+
from mainLogic.utils.gen_utils import generate_safe_folder_name
|
5 |
+
|
6 |
+
legacy_create_task = Blueprint('legacy_create_task', __name__)
|
7 |
+
|
8 |
+
client_manager = Boss.client_manager
|
9 |
+
task_manager = Boss.task_manager
|
10 |
+
OUT_DIR = Boss.OUT_DIR
|
11 |
+
|
12 |
+
@legacy_create_task.route('/api/create_task', methods=['POST'])
|
13 |
+
@legacy_create_task.route('/create_task', methods=['POST'])
|
14 |
+
def create_task():
|
15 |
+
data = request.json
|
16 |
+
client_id = data.get('client_id', 'anonymous')
|
17 |
+
session_id = data.get('session_id', 'anonymous')
|
18 |
+
id = data.get('id')
|
19 |
+
name = data.get('name')
|
20 |
+
|
21 |
+
# Generate safe names
|
22 |
+
name = generate_safe_folder_name(name)
|
23 |
+
|
24 |
+
if not id or not name:
|
25 |
+
return jsonify({'error': 'id and name are required'}), 400
|
26 |
+
|
27 |
+
args = {
|
28 |
+
'name': name,
|
29 |
+
'id': id,
|
30 |
+
'out_dir': OUT_DIR,
|
31 |
+
'client_id': client_id,
|
32 |
+
'session_id': session_id
|
33 |
+
}
|
34 |
+
|
35 |
+
client_manager.add_client(client_id)
|
36 |
+
client_manager.add_session(client_id, session_id)
|
37 |
+
|
38 |
+
task_id = task_manager.create_task(client_id, session_id, download_pw_video, args)
|
39 |
+
return jsonify({'task_id': task_id}), 202
|
40 |
+
|
beta/api/blueprints/login.py
ADDED
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from mainLogic.startup.Login.call_login import LoginInterface
|
2 |
+
from mainLogic.utils.glv_var import PREFS_FILE, debugger
|
3 |
+
from mainLogic.startup.Login.sudat import Login
|
4 |
+
from beta.update import UpdateJSONFile
|
5 |
+
from flask import jsonify, request, Blueprint
|
6 |
+
|
7 |
+
login = Blueprint('login', __name__)
|
8 |
+
|
9 |
+
|
10 |
+
@login.route('/api/otp', methods=['POST'])
|
11 |
+
@login.route('/otp', methods=['POST'])
|
12 |
+
def send_otp():
|
13 |
+
data = request.json
|
14 |
+
if 'phone' not in data:
|
15 |
+
return jsonify({'error': 'Phone number is required'}), 400
|
16 |
+
|
17 |
+
phone = data['phone']
|
18 |
+
if LoginInterface.check_valid_10_dig_number(phone) is False:
|
19 |
+
return jsonify({'error': 'Invalid phone number'}), 400
|
20 |
+
|
21 |
+
lg = Login(phone)
|
22 |
+
if lg.gen_otp() is False:
|
23 |
+
return jsonify({'error': 'Failed to send OTP'}), 500
|
24 |
+
else:
|
25 |
+
return jsonify({'success': 'OTP sent successfully'}), 200
|
26 |
+
|
27 |
+
@login.route('/api/verify-otp', methods=['POST'])
|
28 |
+
@login.route('/verify-otp', methods=['POST'])
|
29 |
+
def verify_otp():
|
30 |
+
data = request.json
|
31 |
+
if 'phone' not in data or 'otp' not in data:
|
32 |
+
return jsonify({'error': 'Phone number and OTP are required'}), 400
|
33 |
+
|
34 |
+
phone = data['phone']
|
35 |
+
otp = data['otp']
|
36 |
+
if LoginInterface.check_valid_10_dig_number(phone) is False:
|
37 |
+
return jsonify({'error': 'Invalid phone number'}), 400
|
38 |
+
|
39 |
+
lg = Login(phone)
|
40 |
+
if lg.login(otp) is False:
|
41 |
+
return jsonify({'error': 'Invalid OTP'}), 400
|
42 |
+
else:
|
43 |
+
u = UpdateJSONFile(PREFS_FILE)
|
44 |
+
u.update('token',lg.token)
|
45 |
+
try:
|
46 |
+
u.update("user_update_index",u.data.get("user_update_index",0)+1)
|
47 |
+
except Exception as e:
|
48 |
+
debugger.error(f" Error updating user_update_index: {e}")
|
49 |
+
return jsonify({'success': 'OTP verified successfully','token':lg.token}), 200
|
50 |
+
|
51 |
+
|
beta/api/blueprints/scarper.py
ADDED
@@ -0,0 +1,349 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import json
|
2 |
+
import urllib.parse
|
3 |
+
|
4 |
+
import urllib3
|
5 |
+
from flask import Blueprint, jsonify, request
|
6 |
+
from beta.batch_scraper_2.Endpoints import Endpoints
|
7 |
+
from mainLogic.utils.dependency_checker import re_check_dependencies
|
8 |
+
from mainLogic.utils.glv import Global
|
9 |
+
from mainLogic.utils.glv_var import debugger
|
10 |
+
from mainLogic.utils import glv_var
|
11 |
+
# Initialize the blueprint
|
12 |
+
scraper_blueprint = Blueprint('scraper', __name__)
|
13 |
+
|
14 |
+
# Initialize BatchAPI with a default token. The token can be updated later via the '/api/set-token' route.
|
15 |
+
|
16 |
+
try:
|
17 |
+
from beta.Syncer.main import Syncer
|
18 |
+
sync = Syncer()
|
19 |
+
except Exception as e:
|
20 |
+
debugger.error(f"Error in Syncer: {e}")
|
21 |
+
pass
|
22 |
+
|
23 |
+
|
24 |
+
|
25 |
+
try:
|
26 |
+
|
27 |
+
batch_api = Endpoints().set_token(vars['prefs'].get('token',{}).get("token",""))
|
28 |
+
except Exception as e:
|
29 |
+
# get token via check_Dependencies
|
30 |
+
|
31 |
+
re_check_dependencies()
|
32 |
+
token = glv_var.vars["prefs"].get("token_config",{})
|
33 |
+
#debugger.debug(f"Token config: {token}")
|
34 |
+
try:
|
35 |
+
access_token = token["access_token"]
|
36 |
+
except Exception as e:
|
37 |
+
debugger.error(f"Error getting access token: {e}")
|
38 |
+
try:
|
39 |
+
access_token = token["token"]
|
40 |
+
except Exception as e:
|
41 |
+
debugger.error(f"Error getting access token: {e}")
|
42 |
+
|
43 |
+
random_id = token.get("random_id",None)
|
44 |
+
try:
|
45 |
+
if random_id is None:
|
46 |
+
batch_api = Endpoints().set_token(access_token)
|
47 |
+
else:
|
48 |
+
batch_api = Endpoints().set_token(access_token,random_id=random_id)
|
49 |
+
except Exception as e:
|
50 |
+
debugger.error(f"Error setting access_token @ scraper.py")
|
51 |
+
|
52 |
+
def create_response(data=None, error=None):
|
53 |
+
response = {"data": data}
|
54 |
+
if error is not None:
|
55 |
+
response["error"] = error
|
56 |
+
return jsonify(response)
|
57 |
+
|
58 |
+
|
59 |
+
def renamer(data,old_key,new_key):
|
60 |
+
new_data = []
|
61 |
+
for element in data:
|
62 |
+
try:
|
63 |
+
element[new_key] = element.pop(old_key)
|
64 |
+
except Exception as e:
|
65 |
+
debugger.error(f"Error renaming f{old_key} to {new_key}: {e}")
|
66 |
+
new_data.append(element)
|
67 |
+
|
68 |
+
return new_data
|
69 |
+
# from werkzeug.urls import url_quote, url_unquote
|
70 |
+
|
71 |
+
@scraper_blueprint.route('/api/khazana/lecture/<program_name>/<topic_name>/<lecture_id>/<path:lecture_url>', methods=['GET'])
|
72 |
+
def get_khazana_lecture(program_name, topic_name, lecture_id, lecture_url):
|
73 |
+
try:
|
74 |
+
debugger.success(f"batch_name: {program_name}")
|
75 |
+
debugger.success(f"batch_api.token: {batch_api.token}")
|
76 |
+
debugger.success(f"batch_api.random_id: {batch_api.random_id}")
|
77 |
+
|
78 |
+
# decode lecture_url
|
79 |
+
lecture_url = urllib.parse.unquote(lecture_url)
|
80 |
+
# print the vars
|
81 |
+
debugger.success(f"lecture_url: {lecture_url}")
|
82 |
+
debugger.success(f"lecture_id: {lecture_id}")
|
83 |
+
debugger.success(f"topic_name: {topic_name}")
|
84 |
+
debugger.success(f"program_name: {program_name}")
|
85 |
+
|
86 |
+
|
87 |
+
khazana = batch_api.process("lecture", khazana=True, program_name=program_name,
|
88 |
+
topic_name=topic_name, lecture_id=lecture_id,
|
89 |
+
lecture_url=lecture_url)
|
90 |
+
return create_response(data=khazana)
|
91 |
+
except Exception as e:
|
92 |
+
debugger.error(f"Error: {e}")
|
93 |
+
return create_response(error=str(e)), 500
|
94 |
+
|
95 |
+
@scraper_blueprint.route('/api/khazana/<program_name>', methods=['GET'])
|
96 |
+
def get_khazana_batch(program_name):
|
97 |
+
try:
|
98 |
+
debugger.success(f"batch_name: {program_name}")
|
99 |
+
debugger.success(f"batch_api.token: {batch_api.token}")
|
100 |
+
debugger.success(f"batch_api.random_id: {batch_api.random_id}")
|
101 |
+
|
102 |
+
# batch_api.batch_name = batch_name
|
103 |
+
# khazana = batch_api.GET_KHAZANA_BATCH(batch_name)
|
104 |
+
khazana = batch_api.process("details", khazana=True, program_name=program_name)
|
105 |
+
return create_response(data=khazana)
|
106 |
+
except Exception as e:
|
107 |
+
debugger.error(f"Error: {e}")
|
108 |
+
return create_response(error=str(e)), 500
|
109 |
+
|
110 |
+
@scraper_blueprint.route('/api/khazana/<program_name>/<subject_name>', methods=['GET'])
|
111 |
+
def get_khazana_subject(program_name, subject_name):
|
112 |
+
try:
|
113 |
+
debugger.success(f"batch_name: {program_name}")
|
114 |
+
debugger.success(f"batch_api.token: {batch_api.token}")
|
115 |
+
debugger.success(f"batch_api.random_id: {batch_api.random_id}")
|
116 |
+
|
117 |
+
# batch_api.batch_name = batch_name
|
118 |
+
# khazana = batch_api.GET_KHAZANA_BATCH(batch_name)
|
119 |
+
khazana = batch_api.process("subject", khazana=True, program_name=program_name, subject_name=subject_name)
|
120 |
+
return create_response(data=khazana)
|
121 |
+
except Exception as e:
|
122 |
+
debugger.error(f"Error: {e}")
|
123 |
+
return create_response(error=str(e)), 500
|
124 |
+
|
125 |
+
@scraper_blueprint.route('/api/khazana/<program_name>/<subject_name>/<teacher_name>', methods=['GET'])
def get_khazana_topics(program_name, subject_name, teacher_name):
    """List topics for a teacher within a Khazana subject."""
    try:
        # Trace the request context for console debugging.
        debugger.success(f"batch_name: {program_name}")
        debugger.success(f"batch_api.token: {batch_api.token}")
        debugger.success(f"batch_api.random_id: {batch_api.random_id}")

        payload = batch_api.process(
            "topics",
            khazana=True,
            program_name=program_name,
            subject_name=subject_name,
            teacher_name=teacher_name,
        )
        return create_response(data=payload)
    except Exception as exc:
        debugger.error(f"Error: {exc}")
        return create_response(error=str(exc)), 500
|
139 |
+
|
140 |
+
@scraper_blueprint.route('/api/khazana/<program_name>/<subject_name>/<teacher_name>/<topic_name>', methods=['GET'])
def get_khazana_sub_topics(program_name, subject_name, teacher_name, topic_name):
    """List sub-topics under a Khazana topic."""
    try:
        # Trace the request context for console debugging.
        debugger.success(f"batch_name: {program_name}")
        debugger.success(f"batch_api.token: {batch_api.token}")
        debugger.success(f"batch_api.random_id: {batch_api.random_id}")

        payload = batch_api.process(
            "sub_topic",
            khazana=True,
            program_name=program_name,
            subject_name=subject_name,
            teacher_name=teacher_name,
            topic_name=topic_name,
        )
        return create_response(data=payload)
    except Exception as exc:
        debugger.error(f"Error: {exc}")
        return create_response(error=str(exc)), 500
|
154 |
+
|
155 |
+
@scraper_blueprint.route('/api/lecture/<batch_name>/<id>', methods=['GET'])
def get_lecture_info(batch_name, id):
    """Resolve DRM playback info (media url, decryption key, kid) for a lecture.

    Path params:
        batch_name: slug of the batch the lecture belongs to.
        id: lecture identifier passed to the license-key fetcher.

    Returns JSON {url, key, kid} on success, or a 500 with the error string.
    """
    try:
        debugger.success(f"batch_name: {batch_name}")
        debugger.success(f"batch_api.token: {batch_api.token}")
        debugger.success(f"batch_api.random_id: {batch_api.random_id}")

        # NOTE: the original also read `url` and `topic_name` from the query
        # string but never used them; those dead locals have been removed.
        # Imported lazily so the decrypt machinery loads only when needed.
        from mainLogic.big4.Ravenclaw_decrypt.key import LicenseKeyFetcher as Lf
        fetcher = Lf(batch_api.token, batch_api.random_id)
        # Indexing below implies get_key returns (kid, key, url) — TODO confirm
        # against LicenseKeyFetcher.
        keys = fetcher.get_key(id, batch_name)

        return create_response(data={
            "url": keys[2],
            "key": keys[1],
            "kid": keys[0]
        })
    except Exception as e:
        debugger.error(f"Error: {e}")
        return create_response(error=str(e)), 500
|
183 |
+
|
184 |
+
|
185 |
+
@scraper_blueprint.route('/api/khazana/<program_name>/<subject_name>/<teacher_name>/<topic_name>/<sub_topic_name>', methods=['GET'])
def get_khazana_chapter(program_name, subject_name, teacher_name, topic_name, sub_topic_name):
    """Return the chapter contents for a fully-qualified Khazana path."""
    try:
        # Trace the request context for console debugging.
        debugger.success(f"batch_name: {program_name}")
        debugger.success(f"batch_api.token: {batch_api.token}")
        debugger.success(f"batch_api.random_id: {batch_api.random_id}")

        payload = batch_api.process(
            "chapter",
            khazana=True,
            program_name=program_name,
            subject_name=subject_name,
            teacher_name=teacher_name,
            topic_name=topic_name,
            sub_topic_name=sub_topic_name,
        )
        return create_response(data=payload)
    except Exception as exc:
        debugger.error(f"Error: {exc}")
        return create_response(error=str(exc)), 500
|
199 |
+
|
200 |
+
@scraper_blueprint.route('/api/batches/<batch_name>', methods=['GET'])
def get_batch(batch_name):
    """Return subject details for a regular (non-Khazana) batch."""
    try:
        # Trace the request context for console debugging.
        debugger.success(f"batch_name: {batch_name}")
        debugger.success(f"batch_api.token: {batch_api.token}")
        debugger.success(f"batch_api.random_id: {batch_api.random_id}")

        details = batch_api.process("details", batch_name=batch_name)
        # renamer presumably maps the 'subject' field to 'name' — confirm in renamer.
        return create_response(data=renamer(details, 'subject', 'name'))
    except Exception as exc:
        debugger.error(f"Error: {exc}")
        return create_response(error=str(exc)), 500
|
214 |
+
|
215 |
+
@scraper_blueprint.route('/api/batches/<batch_name>/<subject_name>', methods=['GET'])
def get_subject(batch_name, subject_name):
    """Return a subject's chapters for a regular batch."""
    try:
        # Trace the request context for console debugging.
        debugger.success(f"batch_name: {batch_name}")
        debugger.success(f"batch_api.token: {batch_api.token}")
        debugger.success(f"batch_api.random_id: {batch_api.random_id}")

        payload = batch_api.process("subject", batch_name=batch_name, subject_name=subject_name)
        # renamer presumably maps the 'topic' field to 'name' — confirm in renamer.
        return create_response(data=renamer(payload, 'topic', 'name'))
    except Exception as exc:
        debugger.error(f"Error: {exc}")
        return create_response(error=str(exc)), 500
|
229 |
+
|
230 |
+
@scraper_blueprint.route('/api/batches/<batch_name>/<subject_name>/<chapter_name>/lectures', methods=['GET'])
@scraper_blueprint.route('/api/batches/<batch_name>/<subject_name>/<chapter_name>', methods=['GET'])
def get_chapter(batch_name, subject_name, chapter_name):
    """Return a chapter's lectures for a regular batch (two URL spellings)."""
    try:
        # Trace the request context for console debugging.
        debugger.success(f"batch_name: {batch_name}")
        debugger.success(f"batch_api.token: {batch_api.token}")
        debugger.success(f"batch_api.random_id: {batch_api.random_id}")

        payload = batch_api.process(
            "chapter",
            batch_name=batch_name,
            subject_name=subject_name,
            chapter_name=chapter_name,
        )
        # renamer presumably maps the 'topic' field to 'name' — confirm in renamer.
        return create_response(data=renamer(payload, 'topic', 'name'))
    except Exception as exc:
        debugger.error(f"Error: {exc}")
        return create_response(error=str(exc)), 500
|
245 |
+
|
246 |
+
@scraper_blueprint.route('/api/batches/<batch_name>/<subject_name>/<chapter_name>/notes', methods=['GET'])
def get_notes(batch_name, subject_name, chapter_name):
    """Return the notes attached to a chapter of a regular batch."""
    try:
        # Trace the request context for console debugging.
        debugger.success(f"batch_name: {batch_name}")
        debugger.success(f"batch_api.token: {batch_api.token}")
        debugger.success(f"batch_api.random_id: {batch_api.random_id}")

        payload = batch_api.process(
            "notes",
            batch_name=batch_name,
            subject_name=subject_name,
            chapter_name=chapter_name,
        )
        # renamer presumably maps the 'topic' field to 'name' — confirm in renamer.
        return create_response(data=renamer(payload, 'topic', 'name'))
    except Exception as exc:
        debugger.error(f"Error: {exc}")
        return create_response(error=str(exc)), 500
|
261 |
+
|
262 |
+
@scraper_blueprint.route('/api/batches/<batch_name>/<subject_name>/<chapter_name>/dpp_pdf', methods=['GET'])
def get_dpp_pdf(batch_name, subject_name, chapter_name):
    """Return the DPP PDFs attached to a chapter of a regular batch."""
    try:
        # Trace the request context for console debugging.
        debugger.success(f"batch_name: {batch_name}")
        debugger.success(f"batch_api.token: {batch_api.token}")
        debugger.success(f"batch_api.random_id: {batch_api.random_id}")

        payload = batch_api.process(
            "dpp_pdf",
            batch_name=batch_name,
            subject_name=subject_name,
            chapter_name=chapter_name,
        )
        # renamer presumably maps the 'topic' field to 'name' — confirm in renamer.
        return create_response(data=renamer(payload, 'topic', 'name'))
    except Exception as exc:
        debugger.error(f"Error: {exc}")
        return create_response(error=str(exc)), 500
|
276 |
+
|
277 |
+
|
278 |
+
@scraper_blueprint.route('/normal/subjects', methods=['GET'])
@scraper_blueprint.route('/api/normal/subjects', methods=['GET'])
def get_normal_subjects():
    """List subjects of a regular batch given `batch_name` as a query param."""
    try:
        batch_name = request.args.get('batch_name')
        details = batch_api.process("details", batch_name=batch_name)
        # renamer presumably maps the 'subject' field to 'name' — confirm in renamer.
        return create_response(data=renamer(details, 'subject', 'name'))
    except Exception as exc:
        debugger.error(f"Error: {exc}")
        return create_response(error=str(exc)), 500
|
290 |
+
|
291 |
+
@scraper_blueprint.route('/normal/chapters/<subject_slug>', methods=['GET'])
@scraper_blueprint.route('/api/normal/chapters/<subject_slug>', methods=['GET'])
def get_normal_chapters(subject_slug):
    """List chapters of a subject in a regular batch (`batch_name` query param)."""
    try:
        batch_name = request.args.get('batch_name')

        # Frame the trace output with horizontal rules for readability.
        Global.hr()
        debugger.success(f"batch_name: {batch_name}")
        debugger.success(f"subject_slug: {subject_slug}")
        Global.hr()

        chapters = batch_api.process("subject", batch_name=batch_name, subject_name=subject_slug)
        debugger.success(f"chapters: {chapters}")

        return create_response(data=chapters)
    except Exception as exc:
        debugger.error(exc)
        return create_response(error=str(exc)), 500
|
313 |
+
|
314 |
+
@scraper_blueprint.route('/normal/lectures', methods=['GET'])
@scraper_blueprint.route('/api/normal/lectures', methods=['GET'])
@scraper_blueprint.route('/api/normal/videos', methods=['GET'])
def get_normal_videos():
    """List lectures/videos of a chapter in a regular batch.

    Query params:
        batch_name: batch slug (optional in the original validation).
        subject_slug, chapter_slug: required; 400 if either is missing.
    """
    try:
        batch_name = request.args.get('batch_name')
        subject_slug = request.args.get('subject_slug')
        chapter_slug = request.args.get('chapter_slug')

        if not all([subject_slug, chapter_slug]):
            return create_response(error="Missing required parameters"), 400

        videos = batch_api.process(
            "chapter",
            batch_name=batch_name,
            subject_name=subject_slug,
            chapter_name=chapter_slug,
        )
        # renamer presumably maps the 'topic' field to 'name' — confirm in renamer.
        return create_response(data=renamer(videos, 'topic', 'name'))
    except Exception as e:
        # Log before responding — the original swallowed the error silently,
        # unlike every sibling handler in this module.
        debugger.error(f"Error: {e}")
        return create_response(error=str(e)), 500
|
330 |
+
|
331 |
+
|
332 |
+
# Fixed: the route string needs no f-prefix (it has no placeholders).
@scraper_blueprint.route('/api/batches')
def get_batches():
    """Return the full batch list, bypassing any cached copy."""
    try:
        batches = batch_api.get_batches_force_hard()
        return create_response(data=batches)
    except Exception as e:
        # Log before responding, for consistency with the other handlers.
        debugger.error(f"Error: {e}")
        return create_response(error=str(e)), 500
|
339 |
+
|
340 |
+
@scraper_blueprint.route('/api/set-token', methods=['POST'])
def set_token():
    """Update the scraper's auth token from the JSON request body."""
    try:
        payload = request.json
        token = payload.get('token')
        if not token:
            return create_response(error="Token is required"), 400
        batch_api.token = token
        return create_response(data={"message": "Token updated successfully"})
    except Exception as exc:
        return create_response(error=str(exc)), 500
|
beta/api/blueprints/session_lodge.py
ADDED
@@ -0,0 +1,137 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
|
3 |
+
from flask import Blueprint, request, jsonify
|
4 |
+
from beta.api.api_dl import download_pw_video
|
5 |
+
from mainLogic.utils.gen_utils import generate_random_word
|
6 |
+
from beta.api.mr_manager.boss_manager import Boss
|
7 |
+
from mainLogic.utils.gen_utils import generate_safe_folder_name
|
8 |
+
|
9 |
+
|
10 |
+
# Blueprint that lodges download sessions/tasks and manages their lifecycle.
session_lodge = Blueprint('session_lodge', __name__)

# Shared singletons from Boss so every blueprint sees the same state.
client_manager = Boss.client_manager
task_manager = Boss.task_manager
OUT_DIR = Boss.OUT_DIR
|
15 |
+
|
16 |
+
|
17 |
+
@session_lodge.route('/api/client/<client_id>/<session_id>/create_session', methods=['POST'])
@session_lodge.route('/client/<client_id>/<session_id>/create_session', methods=['POST'])
def create_session(client_id, session_id):
    """Ensure the client and session exist, then lodge one inactive download
    task per requested lecture.

    JSON body:
        ids / names / batch_names: parallel lists (required; equal lengths).
        topic_names, lecture_urls: optional parallel lists (padded with None).
        client_name: optional display name for the client.

    Returns 202 with {'task_ids': [...]} on success, 400 on bad input.
    """
    data = request.json

    # Lazily register the client on first contact.
    # (Fixed: dropped a duplicate `data = request.json` and a redundant
    # function-local import of generate_random_word already imported above.)
    clients = client_manager.get_client_info(client_id)
    if not clients:
        name = data['client_name'] if 'client_name' in data else generate_random_word()
        print(f"Creating client with ID {client_id} and name {name}")
        client_manager.add_client(client_id, name)

    if 'client_name' in data:
        client_manager.set_client_name(client_id, data['client_name'])

    # Lazily register the session with a generated human-readable name.
    session = client_manager.get_client_info(client_id).get('sessions', {}).get(session_id)
    if not session:
        client_manager.add_session(client_id, session_id)
        sess_name = generate_random_word()
        print(f"Generated session name: {sess_name}")
        client_manager.set_session_name(client_id, session_id, sess_name)

    ids = data.get('ids', [])
    names = data.get('names', [])
    batch_names = data.get('batch_names', [])
    topic_names = data.get('topic_names', [])
    lecture_urls = data.get('lecture_urls', [])

    if not ids or not names:
        return jsonify({'error': 'ids and names are required'}), 400

    if len(ids) != len(names):
        return jsonify({'error': 'ids and names must be of equal length'}), 400

    if len(ids) != len(batch_names):
        return jsonify({'error': 'ids and batch_names must be of equal length'}), 400

    # Sanitise names so they are safe to use as folder components on disk.
    names = [generate_safe_folder_name(name) for name in names]

    task_ids = []

    for i, (id, name, batch_name) in enumerate(zip(ids, names, batch_names)):
        print(f"Creating task for {name} with id {id}")
        args = {
            'name': name,
            'id': id,
            'batch_name': batch_name,
            'topic_name': topic_names[i] if i < len(topic_names) else None,
            'lecture_url': lecture_urls[i] if i < len(lecture_urls) else None,
            'out_dir': OUT_DIR,
            'client_id': client_id,
            'session_id': session_id
        }
        # inactive=True: tasks are only lodged here; /start/<task_id> runs them.
        task_id = task_manager.create_task(client_id, session_id, download_pw_video, args, inactive=True)
        task_ids.append(task_id)

    return jsonify({'task_ids': task_ids}), 202
|
82 |
+
|
83 |
+
|
84 |
+
@session_lodge.route('/api/start/<task_id>',methods=['GET','POST'])
@session_lodge.route('/start/<task_id>',methods=['GET','POST'])
def start_task(task_id):
    """Activate a previously lodged (inactive) task."""
    try:
        task_manager.start_task(task_id)
    except Exception as exc:
        print(exc)
        return jsonify({'error': str(exc)}), 500
    return jsonify({'success': True}), 200
|
93 |
+
|
94 |
+
@session_lodge.route('/api/client/<client_id>/delete_client')
@session_lodge.route('/client/<client_id>/delete_client')
def delete_client_route(client_id):
    """Drop a client's metadata, then best-effort remove its download folder."""
    client_manager.remove_client(client_id)
    try:
        import shutil
        shutil.rmtree(f"{OUT_DIR}/{client_id}")
    except Exception as err:
        # Folder may never have been created; deletion failure is non-fatal.
        print(f"Could not delete client folder: {err}")
    return jsonify({'message': f'Client with ID {client_id} deleted successfully'}), 200
|
104 |
+
|
105 |
+
@session_lodge.route('/api/client/<client_id>/<session_id>/delete_session')
@session_lodge.route('/client/<client_id>/<session_id>/delete_session')
def delete_session_route(client_id, session_id):
    """Drop a session's metadata, then best-effort remove its download folder."""
    client_manager.remove_session(client_id, session_id)
    try:
        import shutil
        shutil.rmtree(f"{OUT_DIR}/{client_id}/{session_id}")
    except Exception as err:
        # Folder may never have been created; deletion failure is non-fatal.
        print(f"Could not delete session folder: {err}")
    return jsonify({'message': f'Session with ID {session_id} for client {client_id} deleted successfully'}), 200
|
115 |
+
|
116 |
+
@session_lodge.route('/api/client/<client_id>/merge_sessions',methods=['POST'])
@session_lodge.route('/client/<client_id>/merge_sessions', methods=['POST'])
def merge_sessions(client_id):
    """Merge session_ids[1] into session_ids[0] for a client.

    Moves the second session's tasks (via the client manager) and its files
    on disk into the first session, then deletes the second session.
    """
    data = request.json
    session_ids = data.get('session_ids', [])
    if not session_ids:
        return jsonify({'error': 'session_ids is required'}), 400
    # Fixed: the original only rejected >2 ids, so a single id passed
    # validation and crashed below with IndexError (surfacing as a 500).
    if len(session_ids) != 2:
        return jsonify({'error': 'Only two sessions can be merged at a time'}), 400
    try:
        client_manager.merge_sessions(client_id, session_ids[0], session_ids[1])

        # Also move /webdl/<client>/<session_2>/* into /webdl/<client>/<session_1>.
        import shutil
        src_dir = f"{OUT_DIR}/{client_id}/{session_ids[1]}"
        dst_dir = f"{OUT_DIR}/{client_id}/{session_ids[0]}"
        for file in os.listdir(src_dir):
            shutil.move(f"{src_dir}/{file}", f"{dst_dir}/{file}")
        shutil.rmtree(src_dir)

        return jsonify({'message': f'Sessions {session_ids} for client {client_id} merged successfully'}), 200
    except Exception as e:
        return jsonify({'error': str(e)}), 500
|
beta/api/blueprints/template_routes.py
ADDED
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from flask import Blueprint, request, jsonify, render_template
|
2 |
+
|
3 |
+
from mainLogic.utils.glv import Global
|
4 |
+
|
5 |
+
# All routes below serve the same SPA shell; routing happens client-side.
template_blueprint = Blueprint('template_blueprint', __name__)
|
6 |
+
|
7 |
+
@template_blueprint.route('/')
def index():
    # SPA entry point; the client-side router handles the actual view.
    return render_template('index.html')
|
10 |
+
|
11 |
+
@template_blueprint.route('/util')
def util():
    # Serves the same SPA shell; /util is resolved client-side.
    return render_template('index.html')
|
14 |
+
|
15 |
+
@template_blueprint.route('/prefs')
def prefs():
    # Serves the same SPA shell; /prefs is resolved client-side.
    return render_template('index.html')
|
18 |
+
|
19 |
+
@template_blueprint.route('/help')
def help():
    # Serves the same SPA shell; /help is resolved client-side.
    # NOTE(review): shadows the builtin help() at module scope — harmless here.
    return render_template('index.html')
|
22 |
+
|
23 |
+
@template_blueprint.route('/sessions')
def sessions():
    # Serves the same SPA shell; /sessions is resolved client-side.
    return render_template('index.html')
|
26 |
+
|
27 |
+
@template_blueprint.route('/admin')
def admin():
    # Serves the same SPA shell; /admin is resolved client-side.
    return render_template('index.html')
|
30 |
+
|
31 |
+
@template_blueprint.route('/boss')
def boss():
    # Serves the same SPA shell; /boss is resolved client-side.
    return render_template('index.html')
|
34 |
+
|
35 |
+
@template_blueprint.route('/login')
def login():
    # Serves the same SPA shell; /login is resolved client-side.
    return render_template('index.html')
|
38 |
+
|
39 |
+
@template_blueprint.route('/profile')
def profile():
    # Serves the same SPA shell; /profile is resolved client-side.
    return render_template('index.html')
|
42 |
+
|
43 |
+
# @template_blueprint.route('/manifest.json')
|
44 |
+
# def manifest():
|
45 |
+
# return render_template('manifest.json')
|
beta/api/blueprints/while_dl_and_post_dl.py
ADDED
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
|
3 |
+
from flask import Blueprint, jsonify, send_file, render_template
|
4 |
+
from beta.api.mr_manager.boss_manager import Boss
|
5 |
+
from mainLogic.error import debugger
|
6 |
+
|
7 |
+
# Shared singletons from Boss so every blueprint sees the same state.
client_manager = Boss.client_manager
task_manager = Boss.task_manager
OUT_DIR = Boss.OUT_DIR

# Routes used while a download is running and after it finishes.
dl_and_post_dl = Blueprint('dl_and_post_dl', __name__)
|
12 |
+
|
13 |
+
|
14 |
+
|
15 |
+
@dl_and_post_dl.route('/api/progress/<task_id>', methods=['GET'])
@dl_and_post_dl.route('/progress/<task_id>', methods=['GET'])
def get_progress(task_id):
    """Report the current progress record for a task."""
    status = task_manager.get_progress(task_id)
    return jsonify(status), 200
|
20 |
+
|
21 |
+
|
22 |
+
@dl_and_post_dl.route('/api/get-file/<task_id>/<name>', methods=['GET'])
@dl_and_post_dl.route('/get-file/<task_id>/<name>', methods=['GET'])
def get_file(task_id, name):
    """Serve the downloaded video for a task, or render an error page.

    Looks up the task, derives the on-disk path
    <OUT_DIR>/<client_id>/<session_id>/<name>-<task_id>.mp4 and streams it
    as an attachment named <name>.mp4. Returns 404 pages when the task is
    unknown or the file has been removed.
    """
    task_info = client_manager.get_progress(task_id)

    if not task_info or task_info['status'] == 'not found':
        # No placeholder needed — plain string instead of the original f-string.
        debugger.error("File not found:")
        return render_template("error.html", task_id=task_id, reason="not_found"), 404

    client_session_dir = os.path.join(OUT_DIR, task_info['client_id'], task_info['session_id'])

    file_path = os.path.join(client_session_dir, f"{name}-{task_id}.mp4")

    # Fixed: removed the dead nested helper `dict_to_tuple`, which was never called.

    if not os.path.exists(file_path):
        debugger.error(f"File not found: {file_path}")
        return render_template("error.html", task_id=task_id, video_details=client_manager.get_task(task_id), reason='deleted'), 404

    return send_file(file_path, as_attachment=True, download_name=f"{name}.mp4")
|
beta/api/mr_manager/boss_manager.py
ADDED
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from beta.api.mr_manager.client_manager import ClientManager
|
2 |
+
from beta.api.mr_manager.task_manager import TaskManager
|
3 |
+
from mainLogic.utils import glv_var
|
4 |
+
|
5 |
+
|
6 |
+
class Boss:
    # Process-wide singletons wired together at import time so every
    # blueprint shares the same client/task state.
    client_manager = ClientManager('clients.json')  # persisted to clients.json in the CWD
    task_manager = TaskManager(client_manager)
    OUT_DIR = glv_var.api_webdl_directory  # root folder for downloaded files
|
beta/api/mr_manager/client_manager.py
ADDED
@@ -0,0 +1,186 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import json
|
2 |
+
import os
|
3 |
+
|
4 |
+
from mainLogic.utils.glv import Global
|
5 |
+
from mainLogic.utils.glv_var import debugger
|
6 |
+
|
7 |
+
|
8 |
+
class ClientManager:
    """JSON-file-backed registry of clients -> sessions -> tasks.

    Layout of self.clients:
        {client_id: {"name": str, "client_id": str,
                     "sessions": {session_id: {"tasks": {task_id: task_info},
                                               "name": str, "timestamp": ...}}}}
    Every mutator calls save_data() so the file stays in sync. Not thread-safe
    by itself; callers (TaskManager) hold their own lock where needed.
    """

    def __init__(self, json_file_path):
        # Path of the JSON persistence file; loaded eagerly on construction.
        self.json_file_path = json_file_path
        self.clients = self.load_data()

    def load_data(self):
        """Load the registry from disk; empty dict if missing or corrupt."""
        if not os.path.exists(self.json_file_path):
            return {}
        try:
            with open(self.json_file_path, 'r') as file:
                return json.load(file)
        except json.JSONDecodeError:
            # Corrupt file: start fresh rather than crash.
            return {}

    def save_data(self):
        """Write the in-memory registry back to the JSON file."""
        with open(self.json_file_path, 'w') as file:
            json.dump(self.clients, file, indent=4)

    def client_exists(self, client_id):
        """True if the client id is registered."""
        return client_id in self.clients

    def session_exists(self, client_id, session_id):
        """True if both the client and the session id exist."""
        return client_id in self.clients and session_id in self.clients[client_id]['sessions']

    def add_client(self, client_id="anonymous",name=""):
        """Register a new client; the 'anonymous' id gets a fixed display name."""
        if client_id not in self.clients:
            self.clients[client_id] = {
                "name": "Anonymous" if client_id == "anonymous" else name,
                "client_id": client_id,
                "sessions": {}
            }
            self.save_data()
        else:
            print(f"Client with ID {client_id} already exists.")

    def remove_client(self, client_id):
        """Delete a client (and all its sessions/tasks) from the registry."""
        if client_id in self.clients:
            del self.clients[client_id]
            self.save_data()
        else:
            print(f"Client with ID {client_id} does not exist.")

    def set_client_name(self, client_id, name):
        """Update a client's display name."""
        if client_id in self.clients:
            self.clients[client_id]['name'] = name
            self.save_data()
        else:
            print(f"Client with ID {client_id} does not exist.")

    def add_session(self, client_id="anonymous", session_id="anonymous"):
        """Create an empty session for a client, stamped with creation time."""
        # Imported lazily to avoid a circular import at module load — TODO confirm.
        from mainLogic.utils.gen_utils import generate_timestamp
        if client_id in self.clients:
            if session_id not in self.clients[client_id]['sessions']:
                timestamp = generate_timestamp()
                self.clients[client_id]['sessions'][session_id] = {"tasks": {}, "name": "", "timestamp": timestamp}
                self.save_data()
            else:
                print(f"Session with ID {session_id} already exists for client {client_id}.")
        else:
            print(f"Client with ID {client_id} does not exist.")

    def remove_session(self, client_id, session_id):
        # NOTE(review): duplicate of delete_session below — candidates for merging.
        if client_id in self.clients and session_id in self.clients[client_id]['sessions']:
            del self.clients[client_id]['sessions'][session_id]
            self.save_data()
        else:
            print(f"Session with ID {session_id} does not exist for client {client_id}.")

    def add_task(self, client_id, session_id, task_id, task_info):
        """Attach a task record to an existing client/session pair."""
        if client_id in self.clients and session_id in self.clients[client_id]['sessions']:
            self.clients[client_id]['sessions'][session_id]['tasks'][task_id] = task_info
            self.save_data()
        else:
            print(f"Either client with ID {client_id} or session with ID {session_id} does not exist.")

    def get_tasks(self,client_id,session_id=None):
        """Return one session's tasks, or the whole sessions dict if no session_id.

        NOTE(review): raises KeyError for an unknown client_id — no guard here.
        """
        # if session_id is empty return all tasks
        if not session_id:
            return self.clients[client_id]['sessions']
        else:
            return self.clients[client_id]['sessions'][session_id]['tasks']

    def get_task(self,task_id):
        """Linear search for a task across all clients/sessions; None if absent."""
        for client_id in self.clients:
            for session_id in self.clients[client_id]['sessions']:
                if task_id in self.clients[client_id]['sessions'][session_id]['tasks']:
                    return self.clients[client_id]['sessions'][session_id]['tasks'][task_id]
        return None

    def update_task(self, task_info):
        """Replace a stored task record; task_info must carry its own ids."""
        client_id = task_info['client_id']
        session_id = task_info['session_id']
        task_id = task_info['task_id']
        if client_id in self.clients and session_id in self.clients[client_id]['sessions']:
            if task_id in self.clients[client_id]['sessions'][session_id]['tasks']:
                self.clients[client_id]['sessions'][session_id]['tasks'][task_id] = task_info
                self.save_data()
            else:
                print(f"Task with ID {task_id} does not exist in session {session_id} for client {client_id}.")
        else:
            print(f"Either client with ID {client_id} or session with ID {session_id} does not exist.")

    def remove_task(self, client_id, session_id, task_id):
        """Delete one task record from a client/session pair."""
        if client_id in self.clients and session_id in self.clients[client_id]['sessions']:
            if task_id in self.clients[client_id]['sessions'][session_id]['tasks']:
                del self.clients[client_id]['sessions'][session_id]['tasks'][task_id]
                self.save_data()
            else:
                print(f"Task with ID {task_id} does not exist in session {session_id} for client {client_id}.")
        else:
            print(f"Either client with ID {client_id} or session with ID {session_id} does not exist.")

    def get_client_info(self, client_id):
        """Return the client's full record, or None if unknown."""
        if client_id in self.clients:
            return self.clients[client_id]
        else:
            print(f"Client with ID {client_id} does not exist.")
            return None

    def get_progress(self,task_id):
        """Return a task's record (which embeds its progress), or None."""
        task = self.get_task(task_id)
        if task:
            return task
        else:
            debugger.error(f"Task with ID {task_id} does not exist.")
            return None

    def set_session_name(self, client_id, session_id, name):
        """Update a session's display name."""
        if client_id in self.clients and session_id in self.clients[client_id]['sessions']:
            self.clients[client_id]['sessions'][session_id]['name'] = name
            self.save_data()
        else:
            print(f"Either client with ID {client_id} or session with ID {session_id} does not exist.")


    def delete_session(self, client_id, session_id):
        # NOTE(review): duplicate of remove_session above — candidates for merging.
        if client_id in self.clients and session_id in self.clients[client_id]['sessions']:
            del self.clients[client_id]['sessions'][session_id]
            self.save_data()
        else:
            print(f"Session with ID {session_id} does not exist for client {client_id}.")

    def merge_sessions(self, client_id, session_id_1, session_id_2):
        """Fold session_id_2 into session_id_1, then delete session_id_2.

        Unfinished tasks ('downloading'/'created'/'failed') in session_id_2 are
        dropped rather than moved; only completed tasks survive the merge.
        """
        if client_id in self.clients and session_id_1 in self.clients[client_id]['sessions'] and session_id_2 in self.clients[client_id]['sessions']:

            # DEBUG
            Global.hr()
            print(f"Session 1: {session_id_1}")
            print(f"Session 2: {session_id_2}")
            print(f"Tasks in session 1: {json.dumps(self.clients[client_id]['sessions'][session_id_1]['tasks'], indent=4)}")
            print(f"Tasks in session 2: {json.dumps(self.clients[client_id]['sessions'][session_id_2]['tasks'], indent=4)}")
            Global.hr()

            # Create a list of task IDs to delete to avoid changing dictionary size during iteration
            tasks_to_delete = [task_id for task_id, task in self.clients[client_id]['sessions'][session_id_2]['tasks'].items()
                               if task['status'] in ['downloading', 'created', 'failed']]

            # Delete the tasks from session_id_2
            for task_id in tasks_to_delete:
                debugger.debug(f"Deleting task {task_id}")
                del self.clients[client_id]['sessions'][session_id_2]['tasks'][task_id]

            # Move tasks from session_id_2 to session_id_1
            self.clients[client_id]['sessions'][session_id_1]['tasks'].update(self.clients[client_id]['sessions'][session_id_2]['tasks'])

            # Delete session_id_2
            del self.clients[client_id]['sessions'][session_id_2]
            self.save_data()
        else:
            print(f"Either client with ID {client_id} or session with ID {session_id_1} or {session_id_2} does not exist.")


    def delete_client(self, client_id):
        """Delete a client; functional duplicate of remove_client."""
        if client_id in self.clients:
            del self.clients[client_id]
            self.save_data()
        else:
            print(f"Client with ID {client_id} does not exist.")
|
beta/api/mr_manager/task_manager.py
ADDED
@@ -0,0 +1,124 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import json
|
2 |
+
import threading
|
3 |
+
import uuid
|
4 |
+
|
5 |
+
from mainLogic.utils.glv import Global
|
6 |
+
from mainLogic.utils.glv_var import debugger
|
7 |
+
|
8 |
+
|
9 |
+
class TaskManager:
|
10 |
+
    def __init__(self, client_manager):
        # In-memory task registry, mirrored into client_manager's JSON store.
        self.tasks = {}
        # Guards self.tasks across the worker threads spawned per task.
        self.lock = threading.Lock()
        self.client_manager = client_manager
        # Tasks lodged with inactive=True, awaiting an explicit start call.
        self.inactive_tasks = {}
|
15 |
+
|
16 |
+
def handle_completion(self, task_id):
|
17 |
+
print(f"Task {task_id} completed")
|
18 |
+
with self.lock:
|
19 |
+
self.tasks[task_id]['status'] = 'completed'
|
20 |
+
self.client_manager.update_task(self.tasks[task_id])
|
21 |
+
|
22 |
+
on_task_complete = handle_completion
|
23 |
+
|
24 |
+
def create_task(self, client_id, session_id, target, *args, inactive=False):
|
25 |
+
Global.hr()
|
26 |
+
task_id = str(uuid.uuid4())
|
27 |
+
debugger.success(f"Args: {args}")
|
28 |
+
args_dict = args[0]
|
29 |
+
try:
|
30 |
+
name = args_dict['name']
|
31 |
+
id = args_dict['id']
|
32 |
+
batch_name = args_dict.get('batch_name', None)
|
33 |
+
topic_name = args_dict.get('topic_name', None)
|
34 |
+
lecture_url = args_dict.get('lecture_url', None)
|
35 |
+
out_dir = args_dict['out_dir']
|
36 |
+
except KeyError:
|
37 |
+
raise ValueError('name, id, batch_name and out_dir are required in args')
|
38 |
+
|
39 |
+
client_id = args_dict.get('client_id', client_id)
|
40 |
+
session_id = args_dict.get('session_id', session_id)
|
41 |
+
|
42 |
+
task_info = {
|
43 |
+
'task_id': task_id,
|
44 |
+
'progress': {
|
45 |
+
'progress': 0
|
46 |
+
},
|
47 |
+
'status': 'created' if inactive else 'running', # Set status to 'created' if inactive
|
48 |
+
'name': name,
|
49 |
+
'out_dir': out_dir,
|
50 |
+
'id': id,
|
51 |
+
'batch_name': batch_name,
|
52 |
+
'topic_name': topic_name,
|
53 |
+
'lecture_url': lecture_url,
|
54 |
+
'client_id': client_id,
|
55 |
+
'session_id': session_id
|
56 |
+
}
|
57 |
+
|
58 |
+
with self.lock:
|
59 |
+
self.tasks[task_id] = task_info
|
60 |
+
self.client_manager.add_task(client_id, session_id, task_id, task_info)
|
61 |
+
|
62 |
+
if not inactive:
|
63 |
+
thread = threading.Thread(target=self._run_task, args=(task_info, target, name, id, out_dir, client_id, session_id, *args[1:]))
|
64 |
+
thread.start()
|
65 |
+
else:
|
66 |
+
self.inactive_tasks[task_id] = {
|
67 |
+
'target': target,
|
68 |
+
'args': args,
|
69 |
+
'task_info': task_info
|
70 |
+
}
|
71 |
+
|
72 |
+
return task_id
|
73 |
+
|
74 |
+
def start_task(self, task_id):
|
75 |
+
with self.lock:
|
76 |
+
if task_id in self.tasks:
|
77 |
+
if self.tasks[task_id]['status'] == 'created':
|
78 |
+
task_info = self.tasks[task_id]
|
79 |
+
target = self._get_target_function(task_id) # Replace with your actual logic to retrieve the target function
|
80 |
+
thread = threading.Thread(target=self._run_task,
|
81 |
+
args=(
|
82 |
+
task_info, target,
|
83 |
+
task_info['name'],task_info['id'], task_info['batch_name'],task_info['topic_name'],task_info['lecture_url'],
|
84 |
+
task_info['out_dir'], task_info['client_id'], task_info['session_id']))
|
85 |
+
thread.start()
|
86 |
+
self.tasks[task_id]['status'] = 'running'
|
87 |
+
else:
|
88 |
+
raise ValueError(f"Task {task_id} is already running or completed.")
|
89 |
+
|
90 |
+
def _run_task(self, task_info, target, *args):
|
91 |
+
task_id = task_info['task_id']
|
92 |
+
try:
|
93 |
+
|
94 |
+
progress_callback = lambda progress: self._update_progress(task_id, progress)
|
95 |
+
debugger.debug(json.dumps([task_id, [*args], str(progress_callback)],indent=4))
|
96 |
+
target(task_id, *args, progress_callback)
|
97 |
+
with self.lock:
|
98 |
+
self.tasks[task_id]['url'] = f'/get-file/{task_id}/{self.tasks[task_id]["name"]}'
|
99 |
+
self.tasks[task_id]['status'] = 'completed'
|
100 |
+
self.client_manager.update_task(self.tasks[task_id])
|
101 |
+
except Exception as e:
|
102 |
+
debugger.info(f"Failed with error {e}")
|
103 |
+
with self.lock:
|
104 |
+
self.tasks[task_id]['status'] = 'failed'
|
105 |
+
self.tasks[task_id]['error'] = str(e)
|
106 |
+
self.client_manager.update_task(self.tasks[task_id])
|
107 |
+
|
108 |
+
def _update_progress(self, task_id, progress):
|
109 |
+
with self.lock:
|
110 |
+
if task_id in self.tasks:
|
111 |
+
self.tasks[task_id]['progress'] = progress
|
112 |
+
self.client_manager.update_task(self.tasks[task_id])
|
113 |
+
|
114 |
+
def get_progress(self, task_id):
|
115 |
+
with self.lock:
|
116 |
+
return self.tasks.get(task_id, {'status': 'not found'})
|
117 |
+
|
118 |
+
def _get_target_function(self, task_id):
|
119 |
+
if task_id in self.inactive_tasks:
|
120 |
+
return self.inactive_tasks[task_id]['target']
|
121 |
+
else:
|
122 |
+
raise ValueError(f"Task {task_id} is not inactive.")
|
123 |
+
|
124 |
+
|
beta/api/templates/base.html
ADDED
@@ -0,0 +1,269 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
<!DOCTYPE html>
|
2 |
+
<html lang="en">
|
3 |
+
<head>
|
4 |
+
<meta charset="UTF-8" />
|
5 |
+
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
6 |
+
<title>{% block title %}Default Title{% endblock %}</title>
|
7 |
+
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700&display=swap" rel="stylesheet">
|
8 |
+
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css">
|
9 |
+
|
10 |
+
<style>
|
11 |
+
:root {
|
12 |
+
/* Color system */
|
13 |
+
--color-background: #0f172a;
|
14 |
+
--color-card: rgba(255, 255, 255, 0.08);
|
15 |
+
--color-text-primary: rgba(255, 255, 255, 0.95);
|
16 |
+
--color-text-secondary: rgba(255, 255, 255, 0.7);
|
17 |
+
--color-border: rgba(255, 255, 255, 0.15);
|
18 |
+
--color-primary: #3b82f6;
|
19 |
+
--color-primary-hover: #2563eb;
|
20 |
+
--color-destructive: #ef4444;
|
21 |
+
--color-warning: #f59e0b;
|
22 |
+
--color-muted: rgba(255, 255, 255, 0.45);
|
23 |
+
|
24 |
+
/* Spacing */
|
25 |
+
--space-1: 0.25rem;
|
26 |
+
--space-2: 0.5rem;
|
27 |
+
--space-3: 0.75rem;
|
28 |
+
--space-4: 1rem;
|
29 |
+
--space-5: 1.25rem;
|
30 |
+
--space-6: 1.5rem;
|
31 |
+
--space-8: 2rem;
|
32 |
+
--space-10: 2.5rem;
|
33 |
+
--space-12: 3rem;
|
34 |
+
|
35 |
+
/* Radius */
|
36 |
+
--radius-sm: 0.25rem;
|
37 |
+
--radius-md: 0.5rem;
|
38 |
+
--radius-lg: 0.75rem;
|
39 |
+
--radius-xl: 1rem;
|
40 |
+
|
41 |
+
/* Shadow */
|
42 |
+
--shadow-sm: 0 1px 2px rgba(0, 0, 0, 0.2);
|
43 |
+
--shadow-md: 0 4px 6px -1px rgba(0, 0, 0, 0.3);
|
44 |
+
--shadow-lg: 0 10px 15px -3px rgba(0, 0, 0, 0.4);
|
45 |
+
|
46 |
+
/* Font sizes */
|
47 |
+
--font-xs: 0.75rem;
|
48 |
+
--font-sm: 0.875rem;
|
49 |
+
--font-md: 1rem;
|
50 |
+
--font-lg: 1.125rem;
|
51 |
+
--font-xl: 1.25rem;
|
52 |
+
--font-2xl: 1.5rem;
|
53 |
+
--font-3xl: 1.875rem;
|
54 |
+
}
|
55 |
+
|
56 |
+
* {
|
57 |
+
box-sizing: border-box;
|
58 |
+
margin: 0;
|
59 |
+
padding: 0;
|
60 |
+
}
|
61 |
+
|
62 |
+
body {
|
63 |
+
font-family: 'Inter', sans-serif;
|
64 |
+
background: linear-gradient(135deg, #1e3a8a, #172554);
|
65 |
+
color: var(--color-text-primary);
|
66 |
+
min-height: 100vh;
|
67 |
+
display: flex;
|
68 |
+
align-items: center;
|
69 |
+
justify-content: center;
|
70 |
+
padding: var(--space-6);
|
71 |
+
}
|
72 |
+
|
73 |
+
.container {
|
74 |
+
width: 100%;
|
75 |
+
max-width: 640px;
|
76 |
+
margin: 0 auto;
|
77 |
+
}
|
78 |
+
|
79 |
+
/* Card Components */
|
80 |
+
.error-card,
|
81 |
+
.moved-card {
|
82 |
+
background: var(--color-card);
|
83 |
+
border-radius: var(--radius-xl);
|
84 |
+
backdrop-filter: blur(10px);
|
85 |
+
border: 1px solid var(--color-border);
|
86 |
+
padding: var(--space-8);
|
87 |
+
box-shadow: var(--shadow-lg),
|
88 |
+
inset 0 1px 1px rgba(255, 255, 255, 0.1);
|
89 |
+
display: flex;
|
90 |
+
flex-direction: column;
|
91 |
+
gap: var(--space-6);
|
92 |
+
}
|
93 |
+
|
94 |
+
/* Header Components */
|
95 |
+
.error-header,
|
96 |
+
.moved-header {
|
97 |
+
display: flex;
|
98 |
+
align-items: center;
|
99 |
+
gap: var(--space-4);
|
100 |
+
}
|
101 |
+
|
102 |
+
.error-icon-wrapper,
|
103 |
+
.moved-icon-wrapper {
|
104 |
+
width: 56px;
|
105 |
+
height: 56px;
|
106 |
+
border-radius: 50%;
|
107 |
+
display: flex;
|
108 |
+
align-items: center;
|
109 |
+
justify-content: center;
|
110 |
+
flex-shrink: 0;
|
111 |
+
}
|
112 |
+
|
113 |
+
.error-icon-wrapper i,
|
114 |
+
.moved-icon-wrapper i {
|
115 |
+
font-size: var(--font-2xl);
|
116 |
+
}
|
117 |
+
|
118 |
+
/* Moved specific styles */
.moved-icon-wrapper {
    /* Bug fix: the original `var(--color-primary)20` is invalid CSS — the
       `#rrggbb``aa` alpha shorthand only works on literal hex colors, not on
       var() results, so the declaration was dropped and the wrapper had no
       background.  Use an explicit translucent value (= #3b82f6 at 0x20/255). */
    background-color: rgba(59, 130, 246, 0.125);
}
|
122 |
+
|
123 |
+
.moved-icon-wrapper i {
|
124 |
+
color: var(--color-primary);
|
125 |
+
}
|
126 |
+
|
127 |
+
.error-header h1 {
|
128 |
+
font-size: var(--font-2xl);
|
129 |
+
font-weight: 700;
|
130 |
+
line-height: 1.2;
|
131 |
+
}
|
132 |
+
|
133 |
+
/* Error Message */
|
134 |
+
.error-message,
|
135 |
+
.moved-message {
|
136 |
+
color: var(--color-text-secondary);
|
137 |
+
line-height: 1.6;
|
138 |
+
}
|
139 |
+
|
140 |
+
.error-message p,
|
141 |
+
.moved-message p {
|
142 |
+
margin-bottom: var(--space-2);
|
143 |
+
}
|
144 |
+
|
145 |
+
.error-message .reason {
|
146 |
+
color: var(--color-text-primary);
|
147 |
+
}
|
148 |
+
|
149 |
+
/* Task ID */
|
150 |
+
.task-id {
|
151 |
+
background: rgba(0, 0, 0, 0.2);
|
152 |
+
border-radius: var(--radius-md);
|
153 |
+
padding: var(--space-3) var(--space-4);
|
154 |
+
font-family: monospace;
|
155 |
+
font-size: var(--font-sm);
|
156 |
+
border: 1px solid var(--color-border);
|
157 |
+
}
|
158 |
+
|
159 |
+
.task-id span {
|
160 |
+
color: var(--color-text-secondary);
|
161 |
+
margin-right: var(--space-2);
|
162 |
+
}
|
163 |
+
|
164 |
+
/* Details Section */
|
165 |
+
.details-section {
|
166 |
+
margin-top: var(--space-2);
|
167 |
+
}
|
168 |
+
|
169 |
+
.details-header {
|
170 |
+
margin-bottom: var(--space-4);
|
171 |
+
}
|
172 |
+
|
173 |
+
.details-header h2 {
|
174 |
+
font-size: var(--font-lg);
|
175 |
+
font-weight: 600;
|
176 |
+
margin-bottom: var(--space-3);
|
177 |
+
}
|
178 |
+
|
179 |
+
.details-divider {
|
180 |
+
height: 1px;
|
181 |
+
background: var(--color-border);
|
182 |
+
margin-top: var(--space-2);
|
183 |
+
}
|
184 |
+
|
185 |
+
.details-list {
|
186 |
+
display: flex;
|
187 |
+
flex-direction: column;
|
188 |
+
gap: var(--space-4);
|
189 |
+
}
|
190 |
+
|
191 |
+
.details-item {
|
192 |
+
display: flex;
|
193 |
+
flex-direction: column;
|
194 |
+
gap: var(--space-1);
|
195 |
+
}
|
196 |
+
|
197 |
+
.details-item dt {
|
198 |
+
color: var(--color-text-secondary);
|
199 |
+
font-size: var(--font-sm);
|
200 |
+
font-weight: 500;
|
201 |
+
}
|
202 |
+
|
203 |
+
.details-item dd {
|
204 |
+
color: var(--color-text-primary);
|
205 |
+
word-break: break-word;
|
206 |
+
}
|
207 |
+
|
208 |
+
.details-item pre {
|
209 |
+
font-family: monospace;
|
210 |
+
font-size: var(--font-sm);
|
211 |
+
background: rgba(0, 0, 0, 0.2);
|
212 |
+
padding: var(--space-3);
|
213 |
+
border-radius: var(--radius-sm);
|
214 |
+
border: 1px solid var(--color-border);
|
215 |
+
overflow: auto;
|
216 |
+
margin-top: var(--space-2);
|
217 |
+
white-space: pre-wrap;
|
218 |
+
}
|
219 |
+
|
220 |
+
/* Button */
|
221 |
+
.primary-button {
|
222 |
+
display: inline-flex;
|
223 |
+
align-items: center;
|
224 |
+
justify-content: center;
|
225 |
+
background-color: var(--color-primary);
|
226 |
+
color: white;
|
227 |
+
font-weight: 500;
|
228 |
+
border-radius: var(--radius-md);
|
229 |
+
padding: var(--space-3) var(--space-6);
|
230 |
+
text-decoration: none;
|
231 |
+
transition: background-color 0.2s, transform 0.2s;
|
232 |
+
border: none;
|
233 |
+
cursor: pointer;
|
234 |
+
font-size: var(--font-md);
|
235 |
+
align-self: flex-start;
|
236 |
+
gap: var(--space-2);
|
237 |
+
}
|
238 |
+
|
239 |
+
.primary-button:hover {
|
240 |
+
background-color: var(--color-primary-hover);
|
241 |
+
transform: translateY(-1px);
|
242 |
+
}
|
243 |
+
|
244 |
+
/* Responsive adjustments */
|
245 |
+
@media (max-width: 640px) {
|
246 |
+
.error-card,
|
247 |
+
.moved-card {
|
248 |
+
padding: var(--space-6);
|
249 |
+
}
|
250 |
+
|
251 |
+
.error-header h1,
|
252 |
+
.moved-header h1 {
|
253 |
+
font-size: var(--font-xl);
|
254 |
+
}
|
255 |
+
|
256 |
+
.error-icon-wrapper,
|
257 |
+
.moved-icon-wrapper {
|
258 |
+
width: 48px;
|
259 |
+
height: 48px;
|
260 |
+
}
|
261 |
+
}
|
262 |
+
</style>
|
263 |
+
</head>
|
264 |
+
<body>
|
265 |
+
<div class="container">
|
266 |
+
{% block content %}{% endblock %}
|
267 |
+
</div>
|
268 |
+
</body>
|
269 |
+
</html>
|
beta/api/templates/error.html
ADDED
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{% extends "base.html" %}
|
2 |
+
|
3 |
+
{% block title %}Video Error{% endblock %}
|
4 |
+
|
5 |
+
{% block content %}
|
6 |
+
{# Map reason to icon, color and message #}
|
7 |
+
{% set status_configs = {
|
8 |
+
'deleted': {
|
9 |
+
'icon': 'fa-circle-xmark',
|
10 |
+
'color': 'var(--color-destructive)',
|
11 |
+
'text': 'The video has been deleted.'
|
12 |
+
},
|
13 |
+
'incomplete': {
|
14 |
+
'icon': 'fa-hourglass-half',
|
15 |
+
'color': 'var(--color-warning)',
|
16 |
+
'text': 'Processing of the video is incomplete.'
|
17 |
+
},
|
18 |
+
'failed': {
|
19 |
+
'icon': 'fa-circle-exclamation',
|
20 |
+
'color': 'var(--color-destructive)',
|
21 |
+
'text': 'Processing of the video failed.'
|
22 |
+
},
|
23 |
+
'not_found':{
|
24 |
+
'icon': 'fa-circle-xmark',
|
25 |
+
'color': 'var(--color-destructive)',
|
26 |
+
'text': 'The video was not found.'
|
27 |
+
}
|
28 |
+
} %}
|
29 |
+
|
30 |
+
{# Get the configuration for the current reason, or use defaults #}
|
31 |
+
{% set config = status_configs.get(reason, {
|
32 |
+
'icon': 'fa-circle-exclamation',
|
33 |
+
'color': 'var(--color-warning)',
|
34 |
+
'text': reason if reason else 'Unknown error'
|
35 |
+
}) %}
|
36 |
+
|
37 |
+
<div class="error-card">
|
38 |
+
<div class="error-header">
|
39 |
+
<div class="error-icon-wrapper" style="background-color: {{ config.color }}20;">
|
40 |
+
<i class="fas {{ config.icon }}" style="color: {{ config.color }};"></i>
|
41 |
+
</div>
|
42 |
+
<h1>Video Unavailable</h1>
|
43 |
+
</div>
|
44 |
+
|
45 |
+
<div class="error-message">
|
46 |
+
<p>The video you requested could not be found or is currently unavailable.</p>
|
47 |
+
<p class="reason"><strong>{{ config.text }}</strong></p>
|
48 |
+
</div>
|
49 |
+
|
50 |
+
<div class="task-id">
|
51 |
+
<span>Task ID:</span> {{ task_id }}
|
52 |
+
</div>
|
53 |
+
|
54 |
+
{% if video_details and video_details.items() %}
|
55 |
+
<div class="details-section">
|
56 |
+
<div class="details-header">
|
57 |
+
<h2>Details</h2>
|
58 |
+
<div class="details-divider"></div>
|
59 |
+
</div>
|
60 |
+
|
61 |
+
<dl class="details-list">
|
62 |
+
{% for key, value in video_details.items() %}
|
63 |
+
<div class="details-item">
|
64 |
+
<dt>{{ key }}</dt>
|
65 |
+
<dd>
|
66 |
+
{% if value is mapping or (value is iterable and value is not string) %}
|
67 |
+
<pre>{{ value | tojson(indent=2) }}</pre>
|
68 |
+
{% else %}
|
69 |
+
{{ value }}
|
70 |
+
{% endif %}
|
71 |
+
</dd>
|
72 |
+
</div>
|
73 |
+
{% endfor %}
|
74 |
+
</dl>
|
75 |
+
</div>
|
76 |
+
{% endif %}
|
77 |
+
|
78 |
+
<a href="/" class="primary-button">Go Home</a>
|
79 |
+
</div>
|
80 |
+
{% endblock %}
|
beta/api/templates/index.html
ADDED
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{% extends "base.html" %}
|
2 |
+
|
3 |
+
{% block title %}pwdl Web UI Moved{% endblock %}
|
4 |
+
|
5 |
+
{% block content %}
|
6 |
+
<div class="moved-card">
|
7 |
+
<div class="moved-header">
|
8 |
+
<div class="moved-icon-wrapper">
|
9 |
+
<i class="fas fa-arrow-right-long"></i>
|
10 |
+
</div>
|
11 |
+
<h1>pwdl Web UI Has Moved</h1>
|
12 |
+
</div>
|
13 |
+
|
14 |
+
<div class="moved-message">
|
15 |
+
<p>We've migrated to a new online home for a faster and smoother experience.</p>
|
16 |
+
</div>
|
17 |
+
|
18 |
+
<a href="https://pwdl-webui.vercel.app" target="_blank" class="primary-button">
|
19 |
+
<span>Visit pwdl-webui.vercel.app</span>
|
20 |
+
<i class="fas fa-external-link-alt"></i>
|
21 |
+
</a>
|
22 |
+
</div>
|
23 |
+
{% endblock %}
|
beta/batch_scraper_2/Endpoints.py
ADDED
@@ -0,0 +1,150 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Callable, List
|
2 |
+
|
3 |
+
from beta.util import extract_uuid, generate_safe_file_name
|
4 |
+
from mainLogic.utils.Endpoint import Endpoint
|
5 |
+
from mainLogic.utils.glv_var import debugger
|
6 |
+
|
7 |
+
|
8 |
+
class Endpoints:
    """Builds penpencil API URLs and fetches/extracts their responses for both
    regular batches and Khazana programs.

    Typical usage::

        data = Endpoints(verbose=True).set_token(token).process(
            "details", batch_name="some-batch")
    """

    def __init__(self, verbose=False):
        self.verbose = verbose
        self.DEFAULT_HEADERS = {
            'client-id': '5eb393ee95fab7468a79d189',
            # 'randomid': 'dbacc4f3-9024-491d-b2d3-72bd4b6ee281',
            'client-type': 'WEB',
        }
        self.token = None
        self.random_id = None

        class API:
            """URL builders for the regular batches API plus shared response
            post-processing."""

            def __init__(self, outer):
                self.outer = outer
                self.base = "https://api.penpencil.co"
                self.v1 = "v1"
                self.v2 = "v2"
                self.v3 = "v3"
                self.hard_limit = 50  # page-size cap applied to list endpoints

            def url_details(self, batch_name):
                return f"{self.base}/{self.v3}/batches/{batch_name}/details"

            def url_subject(self, batch_name, subject_name):
                return f"{self.base}/{self.v2}/batches/{batch_name}/subject/{subject_name}/topics?limit={self.hard_limit}"

            def url_chapter(self, batch_name, subject_name, chapter_name):
                return f"{self.base}/{self.v2}/batches/{batch_name}/subject/{subject_name}/contents?limit={self.hard_limit}&contentType=videos&tag={chapter_name}"

            def url_notes(self, batch_name, subject_name, chapter_name):
                return f"{self.base}/{self.v2}/batches/{batch_name}/subject/{subject_name}/contents?limit={self.hard_limit}&contentType=notes&tag={chapter_name}"

            def url_lecture(self, lecture_id, batch_name):
                return f"https://api.penpencil.co/{self.v1}/videos/video-url-details?type=BATCHES&childId={lecture_id}&parentId={batch_name}&reqType=query&videoContainerType=DASH"

            def url_dpp_pdf(self, batch_name, subject_name, chapter_name):
                return f"{self.base}/{self.v2}/batches/{batch_name}/subject/{subject_name}/contents?page=1&contentType=DppNotes&tag={chapter_name}&limit={self.hard_limit}"

            def post_process(self, response: dict, keys_to_extract: List[str]):
                """Walk `keys_to_extract` down into the response dict; return the
                value found, or [] on any failure (missing key, wrong shape)."""
                try:
                    data = response
                    debugger.debug(f"Processing response:")
                    debugger.info(data)
                    for key in keys_to_extract:
                        if isinstance(data, dict) and key in data:
                            data = data[key]
                        else:
                            raise KeyError(f"Key '{key}' not found at level {key}")
                    if self.outer.verbose:
                        debugger.success(f"Successfully extracted data using keys {keys_to_extract}")
                    return data
                except Exception as e:
                    debugger.error(f"Failed to extract data: {e}")
                    return []

        class Khazana(API):
            """URL builders for Khazana program endpoints.  Hierarchy is
            program -> subject -> teacher (served as 'chapters') -> topic
            -> sub-topic -> contents."""

            def __init__(self, outer):
                super().__init__(outer)

            def url_details(self, program_name):
                return f"{self.base}/{self.v1}/programs/{program_name}/subjects?page=1&limit={self.hard_limit}"

            def url_subject(self, program_name, subject_name):
                # even though 'chapters' is returned it actually returns 'teachers'
                return f"{self.base}/{self.v2}/programs/{program_name}/subjects/{subject_name}/chapters?page=1&limit={self.hard_limit}"

            def url_topics(self, program_name, subject_name, teacher_name):
                return f"{self.base}/{self.v1}/programs/{program_name}/subjects/{subject_name}/chapters/{teacher_name}/topics?page=1&limit={self.hard_limit}"

            def url_sub_topic(self, program_name, subject_name, teacher_name, topic_name):
                # actually returns 'chapters' by a specific teacher;
                # topic_name is actually the chapter name
                return f"{self.base}/{self.v1}/programs/{program_name}/subjects/{subject_name}/chapters/{teacher_name}/topics/{topic_name}/contents/sub-topic?page=1&limit={self.hard_limit}"

            def url_chapter(self, program_name, subject_name, teacher_name, topic_name, sub_topic_name):
                return f"{self.base}/{self.v2}/programs/contents?programId={program_name}&subjectId={subject_name}&chapterId={teacher_name}&topicId={topic_name}&subTopicId={sub_topic_name}&page=1&limit={self.hard_limit}"

            def url_lecture(self, program_name, topic_name, lecture_id, lecture_url):
                return (f"{self.base}/{self.v1}/videos/video-url-details?type=RECORDED&videoContainerType=DASH&reqType"
                        f"=query&childId={lecture_id}&parentId={program_name}&"
                        f"videoUrl={lecture_url}&secondaryParentId={topic_name}")

        self.API = API(self)
        self.Khazana = Khazana(self)

        class Lambert:
            """Descriptor tying a URL builder to the response keys to extract
            and the kwargs it requires."""

            def __init__(self, url_func: Callable, post_process_args: List[str], required_args: List[str]):
                self.url_func = url_func
                self.post_process_args = post_process_args
                self.required_args = required_args

        # Endpoint descriptors for the regular batches API, keyed by `type`.
        self.data_logs = {
            "details": Lambert(self.API.url_details, ["data", "subjects"], ["batch_name"]),
            "subject": Lambert(self.API.url_subject, ["data"], ["batch_name", "subject_name"]),
            "chapter": Lambert(self.API.url_chapter, ["data"], ["batch_name", "subject_name", "chapter_name"]),
            "notes": Lambert(self.API.url_notes, ["data"], ["batch_name", "subject_name", "chapter_name"]),
            "lecture": Lambert(self.API.url_lecture, ["data"], ["batch_name", "lecture_id"]),
            "dpp_pdf": Lambert(self.API.url_dpp_pdf, ["data"], ["batch_name", "subject_name", "chapter_name"]),
        }

        # Endpoint descriptors for the Khazana API, keyed by `type`.
        self.khazana_logs = {
            "details": Lambert(self.Khazana.url_details, ["data"], ["program_name"]),
            "subject": Lambert(self.Khazana.url_subject, ["data"], ["program_name", "subject_name"]),
            "topics": Lambert(self.Khazana.url_topics, ["data"], ["program_name", "subject_name", "teacher_name"]),
            "sub_topic": Lambert(self.Khazana.url_sub_topic, ["data"], ["program_name", "subject_name", "teacher_name", "topic_name"]),
            "chapter": Lambert(self.Khazana.url_chapter, ["data"], ["program_name", "subject_name", "teacher_name", "topic_name", "sub_topic_name"]),
            "lecture": Lambert(self.Khazana.url_lecture, ["data"], ["program_name", "topic_name", "lecture_id", "lecture_url"]),
        }

    def set_token(self, token, random_id="a3e290fa-ea36-4012-9124-8908794c33aa"):
        """Attach the bearer token (and optional random id) to DEFAULT_HEADERS.

        Returns self so calls can be chained.

        Bug fix: the original used dict.setdefault, which only writes the
        Authorization header the *first* time — calling set_token again with a
        refreshed token silently kept the stale header.  Plain assignment
        makes token refresh effective.
        """
        self.token = token
        self.DEFAULT_HEADERS['Authorization'] = 'Bearer ' + self.token
        if random_id:
            self.random_id = random_id
            self.DEFAULT_HEADERS['randomid'] = self.random_id
        if self.verbose:
            debugger.debug("Authorization token set successfully.")
        return self

    def process(self, type: str, khazana=False, **kwargs):
        """Resolve the endpoint descriptor for `type` (from khazana_logs when
        khazana=True, else data_logs), validate required kwargs, fetch the URL
        and return the extracted payload.

        Raises KeyError for an unknown `type` and ValueError when required
        arguments are missing.
        """
        lambert = self.data_logs[type] if not khazana else self.khazana_logs[type]

        missing_args = [arg for arg in lambert.required_args if arg not in kwargs]
        if missing_args:
            raise ValueError(f"Missing required arguments for '{type}': {missing_args}")

        url = lambert.url_func(**kwargs)
        if self.verbose:
            debugger.debug(f"Fetching from URL: {url}")

        endpoint = Endpoint(url, headers=self.DEFAULT_HEADERS)
        fetched_response = endpoint.fetch()[0]
        processed_data = self.API.post_process(fetched_response, lambert.post_process_args)
        return processed_data
|
148 |
+
|
149 |
+
|
150 |
+
|
beta/obsolete/batch_scraper/Endpoints.py
ADDED
@@ -0,0 +1,178 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from beta.util import extract_uuid, generate_safe_file_name
|
2 |
+
from mainLogic.utils.Endpoint import Endpoint
|
3 |
+
from mainLogic.utils.glv_var import debugger
|
4 |
+
|
5 |
+
|
6 |
+
class Endpoints:
|
7 |
+
DEFAULT_HEADERS = {
|
8 |
+
"randomId": "441c443a-2ab0-40da-86c1-885b88892094",
|
9 |
+
"Referer": "https://www.pw.live/",
|
10 |
+
"sec-ch-ua": '"Google Chrome";v="129", "Not=A?Brand";v="8", "Chromium";v="129"',
|
11 |
+
"sec-ch-ua-mobile": "?0",
|
12 |
+
"client-type": "WEB",
|
13 |
+
"client-id": "5eb393ee95fab7468a79d189",
|
14 |
+
"integration-with": "",
|
15 |
+
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36",
|
16 |
+
"Accept": "application/json, text/plain, */*",
|
17 |
+
"client-version": "6.0.6"
|
18 |
+
}
|
19 |
+
|
20 |
+
@staticmethod
|
21 |
+
def post_function_subjects_khazana(data):
|
22 |
+
return [{
|
23 |
+
'slug': subject['slug'],
|
24 |
+
'name': subject['name']
|
25 |
+
} for subject in data['data']] if isinstance(data, dict) and 'data' in data else []
|
26 |
+
|
27 |
+
@staticmethod
|
28 |
+
def post_function_batches_khazana(data):
|
29 |
+
return [{
|
30 |
+
'name': f"{chapter['name']} {chapter['description'].split(';')[0]}",
|
31 |
+
'slug': chapter['slug'],
|
32 |
+
'topics': chapter['totalTopics'],
|
33 |
+
'img': '' if 'imageId' not in chapter else (chapter['imageId']['baseUrl'] + chapter['imageId']['key'])
|
34 |
+
} for chapter in data['data']] if isinstance(data, dict) and 'data' in data else []
|
35 |
+
|
36 |
+
@staticmethod
|
37 |
+
def post_function_topics_khazana(data):
|
38 |
+
return [{
|
39 |
+
'name': chapter['name'],
|
40 |
+
'slug': chapter['slug'],
|
41 |
+
'id': chapter['_id'],
|
42 |
+
'video_count': chapter['totalLectures']
|
43 |
+
} for chapter in data['data']] if isinstance(data, dict) and 'data' in data else []
|
44 |
+
|
45 |
+
@staticmethod
|
46 |
+
def post_function_lectures_khazana(data):
|
47 |
+
return [{
|
48 |
+
'name': video['title'],
|
49 |
+
'url': video['content'][0]['videoUrl'],
|
50 |
+
'img': video['content'][0]["videoDetails"]["image"],
|
51 |
+
'duration': video['content'][0]["videoDetails"]["duration"]
|
52 |
+
} for video in data['data']] if isinstance(data, dict) and 'data' in data else []
|
53 |
+
|
54 |
+
@staticmethod
|
55 |
+
def post_function_subjects(data):
|
56 |
+
return [{
|
57 |
+
'slug': subject['slug'],
|
58 |
+
'name': subject['subject'],
|
59 |
+
'chapter_count': subject.get('tagCount', 0)
|
60 |
+
} for subject in data['data']['subjects']] if isinstance(data, dict) and 'data' in data and 'subjects' in data['data'] else []
|
61 |
+
|
62 |
+
@staticmethod
|
63 |
+
def post_function_chapters(data):
|
64 |
+
return [{
|
65 |
+
'name': chapter['name'],
|
66 |
+
'slug': chapter['slug'],
|
67 |
+
'video_count': chapter['videos']
|
68 |
+
} for chapter in data['data']] if isinstance(data, dict) and 'data' in data else []
|
69 |
+
|
70 |
+
@staticmethod
|
71 |
+
def post_function_videos(data):
|
72 |
+
return [{
|
73 |
+
'name': video['topic'],
|
74 |
+
'url': video['url'],
|
75 |
+
'img': video['videoDetails']['image'],
|
76 |
+
'duration': video['videoDetails']['duration']
|
77 |
+
} for video in data['data']] if isinstance(data, dict) and 'data' in data else []
|
78 |
+
|
79 |
+
@staticmethod
|
80 |
+
def post_function_videos_simple(data):
|
81 |
+
|
82 |
+
try:
|
83 |
+
return [{
|
84 |
+
'name': generate_safe_file_name(video['topic']),
|
85 |
+
'url': extract_uuid(video['url'])[0],
|
86 |
+
} for video in data['data']] if isinstance(data, dict) and 'data' in data else []
|
87 |
+
except Exception as e:
|
88 |
+
debugger.error(f"Error in post_function_videos_simple: {e}")
|
89 |
+
return []
|
90 |
+
|
91 |
+
|
92 |
+
|
93 |
+
@staticmethod
|
94 |
+
def post_function_batches(data):
|
95 |
+
return [{
|
96 |
+
'slug': batch['batchId']['slug'],
|
97 |
+
'img': '' if 'previewImage' not in batch['batchId'] else (batch['batchId']['previewImage']['baseUrl'] + batch['batchId']['previewImage']['key'])
|
98 |
+
} for batch in data['data']] if isinstance(data, dict) and 'data' in data else []
|
99 |
+
|
100 |
+
@staticmethod
|
101 |
+
def GET_KHAZANA_SUBJECTS_EP(batch_name):
|
102 |
+
return Endpoint(
|
103 |
+
url=f"https://api.penpencil.co/v1/programs/{batch_name}/subjects?page={{page}}",
|
104 |
+
method='GET',
|
105 |
+
headers=Endpoints.DEFAULT_HEADERS.copy(),
|
106 |
+
post_function=Endpoints.post_function_subjects_khazana
|
107 |
+
)
|
108 |
+
|
109 |
+
@staticmethod
|
110 |
+
def GET_KHAZANA_BATCHES_EP(batch_name, subject_slug):
|
111 |
+
return Endpoint(
|
112 |
+
url=f"https://api.penpencil.co/v2/programs/{batch_name}/subjects/{subject_slug}/chapters?page={{page}}",
|
113 |
+
method='GET',
|
114 |
+
headers=Endpoints.DEFAULT_HEADERS.copy(),
|
115 |
+
post_function=Endpoints.post_function_batches_khazana
|
116 |
+
)
|
117 |
+
|
118 |
+
@staticmethod
|
119 |
+
def GET_KHAZANA_CHAPTERS_EP(batch_name, subject_slug):
|
120 |
+
return Endpoint(
|
121 |
+
url=f"https://api.penpencil.co/v1/programs/{batch_name}/subjects/{subject_slug}/chapters/{subject_slug}/topics?page={{page}}",
|
122 |
+
method='GET',
|
123 |
+
headers=Endpoints.DEFAULT_HEADERS.copy(),
|
124 |
+
post_function=Endpoints.post_function_topics_khazana
|
125 |
+
)
|
126 |
+
|
127 |
+
@staticmethod
|
128 |
+
def get_sub_topic_khazana(batch_name, subject_slug, topic_id):
|
129 |
+
return Endpoint(
|
130 |
+
url=f"https://api.penpencil.co/v1/programs/{batch_name}/subjects/{subject_slug}/chapters/{subject_slug}/topics/{topic_id}/contents/sub-topic?page={{page}}",
|
131 |
+
method='GET',
|
132 |
+
headers=Endpoints.DEFAULT_HEADERS.copy()
|
133 |
+
)
|
134 |
+
|
135 |
+
@staticmethod
|
136 |
+
def GET_KHAZANA_LECTURES_EP(batch_name, subject_slug, chapter_slug, topic_id, sub_topic_id):
|
137 |
+
return Endpoint(
|
138 |
+
url=f"https://api.penpencil.co/v2/programs/contents?type=&programId={batch_name}&subjectId={subject_slug}&chapterId={chapter_slug}&topicId={topic_id}&page={{page}}&subTopicId={sub_topic_id}",
|
139 |
+
method='GET',
|
140 |
+
headers=Endpoints.DEFAULT_HEADERS.copy(),
|
141 |
+
post_function=Endpoints.post_function_lectures_khazana
|
142 |
+
)
|
143 |
+
|
144 |
+
@staticmethod
|
145 |
+
def GET_NORMAL_SUBJECTS_EP(batch_name):
|
146 |
+
return Endpoint(
|
147 |
+
url=f"https://api.penpencil.co/v3/batches/{batch_name}/details",
|
148 |
+
method='GET',
|
149 |
+
headers=Endpoints.DEFAULT_HEADERS.copy(),
|
150 |
+
post_function=Endpoints.post_function_subjects
|
151 |
+
)
|
152 |
+
|
153 |
+
@staticmethod
|
154 |
+
def GET_NORMAL_CHAPTERS_EP(batch_name, subject_slug):
|
155 |
+
return Endpoint(
|
156 |
+
url=f"https://api.penpencil.co/v2/batches/{batch_name}/subject/{subject_slug}/topics?page={{page}}",
|
157 |
+
method='GET',
|
158 |
+
headers=Endpoints.DEFAULT_HEADERS.copy(),
|
159 |
+
post_function=Endpoints.post_function_chapters
|
160 |
+
)
|
161 |
+
|
162 |
+
@staticmethod
|
163 |
+
def GET_NORMAL_LECTURES_EP(batch_name, subject_slug, chapter_slug,simple=False):
|
164 |
+
return Endpoint(
|
165 |
+
url=f"https://api.penpencil.co/v2/batches/{batch_name}/subject/{subject_slug}/contents?page={{page}}&contentType=videos&tag={chapter_slug}",
|
166 |
+
method='GET',
|
167 |
+
headers=Endpoints.DEFAULT_HEADERS.copy(),
|
168 |
+
post_function=Endpoints.post_function_videos if not simple else Endpoints.post_function_videos_simple
|
169 |
+
)
|
170 |
+
|
171 |
+
@staticmethod
|
172 |
+
def get_batches_force_hard():
|
173 |
+
return Endpoint(
|
174 |
+
url="https://api.penpencil.co/v1/cohort/634fd2463ce3d7001c50798a/batches?page={page}&filter=true&tag=online&batchChildUrl=/batches-new/?tag=online&version=2",
|
175 |
+
method='GET',
|
176 |
+
headers=Endpoints.DEFAULT_HEADERS.copy(),
|
177 |
+
post_function=Endpoints.post_function_batches
|
178 |
+
)
|
beta/obsolete/batch_scraper/app.py
ADDED
@@ -0,0 +1,138 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import json
|
2 |
+
from tabnanny import verbose
|
3 |
+
|
4 |
+
from mainLogic.utils.Endpoint import Endpoint
|
5 |
+
from mainLogic.utils import glv_var
|
6 |
+
from mainLogic.utils.glv import Global
|
7 |
+
from beta.obsolete.batch_scraper.Endpoints import Endpoints
|
8 |
+
from mainLogic.utils.glv_var import debugger
|
9 |
+
|
10 |
+
|
11 |
+
class BatchAPI:
    """Client for the PenPencil batch/khazana endpoints.

    Handles bearer-token injection, page-number pagination and optional
    per-endpoint post-processing (``Endpoint.post_function``).
    """

    def __init__(self, batch_name: str, token: str, force=True, verbose=False):
        self.batch_name = batch_name
        self.token = token
        self.force = force
        # BUG FIX: ``verbose`` was previously dropped here, so the check in
        # dataFromAPI resolved to the module-level ``from tabnanny import
        # verbose`` function object (always truthy) and debug output was
        # unconditionally on.
        self.verbose = verbose

    def dataFromAPI(self, endpoint: Endpoint):
        """Fetch one endpoint and return its (optionally post-processed) data.

        When ``self.force`` is set the token is re-read from stored prefs;
        raises ValueError if no stored token exists.  With no post_function,
        returns the JSON object on HTTP 200, otherwise the response text.
        """
        if self.force:
            debugger.error("Forced to get token from stored prefs")
            try:
                self.token = glv_var.vars['prefs']['token']
            except Exception as e:
                debugger.error(f"Error: {e}")
                self.token = None
                raise ValueError("Token not found in prefs")
            debugger.success(f"New Token: {self.token}")

        if self.token and 'Authorization' not in endpoint.headers:
            endpoint.headers['Authorization'] = f'Bearer {self.token}'

        response_obj, status_code, response = endpoint.fetch()

        if self.verbose:
            Global.hr()
            print(f"Debugging at {endpoint.url}")
            debugger.success(f"Response: {response}")
            print(f"Response Status Code: {status_code}")
            print(f"Response Text: \n{json.dumps(response_obj)}")
            Global.hr()

        if endpoint.post_function:
            return endpoint.post_function(response_obj)
        return response_obj if status_code == 200 else response.text

    def get_paginated_data(self, endpoint: Endpoint):
        """Accumulate results page by page until an empty or error page.

        The endpoint URL should contain a ``{page}`` placeholder; without
        one the formatted URL equals the template and only one page is read.
        """
        all_data = []
        page = 1

        while True:
            paginated_endpoint = endpoint.__copy__()
            paginated_endpoint.url = paginated_endpoint.url.format(page=page)
            response = self.dataFromAPI(paginated_endpoint)

            # A plain string means a non-200 response body; stop paginating.
            if isinstance(response, str):
                break
            if not response:
                break

            all_data.extend(response)

            page += 1

            # URL without a {page} placeholder: single page only.
            if paginated_endpoint.url == endpoint.url:
                break

        return all_data

    def GET_KHAZANA_SUBJECTS(self):
        return self.get_paginated_data(Endpoints.GET_KHAZANA_SUBJECTS_EP(self.batch_name))

    def GET_KHAZANA_BATCHES(self, kh_subject_slug):
        return self.get_paginated_data(Endpoints.GET_KHAZANA_BATCHES_EP(self.batch_name, kh_subject_slug))

    def GET_KHAZANA_CHAPTERS(self, subject_slug_kh):
        return self.get_paginated_data(Endpoints.GET_KHAZANA_CHAPTERS_EP(self.batch_name, subject_slug_kh))

    def GET_KHAZANA_LECTURES(self, subject_slug_kh, chapter_slug_kh, topic_id):
        """Resolve the first sub-topic id of a topic, then page its lectures."""
        Global.hr()
        print(f"Fetching sub-topic for Batch: {self.batch_name}, Subject: {subject_slug_kh}, Topic: {topic_id}")
        sub_topic_response = self.dataFromAPI(
            Endpoints.get_sub_topic_khazana(self.batch_name, subject_slug_kh, topic_id)
        )

        if 'data' in sub_topic_response:
            sub_topic_response = sub_topic_response['data']
        else:
            # Error handling
            debugger.error(f"No data found in response @ khazana lectures: {sub_topic_response}")
            return []

        sub_topic_id = sub_topic_response[0]['_id'] if sub_topic_response else None
        print(f"Sub-topic ID: {sub_topic_id}")

        if not sub_topic_id:
            debugger.error("No sub-topic ID found")
            return []

        Global.hr()
        print(f"Fetching lectures for Sub-topic ID: {sub_topic_id}")
        return self.get_paginated_data(
            Endpoints.GET_KHAZANA_LECTURES_EP(self.batch_name, subject_slug_kh, chapter_slug_kh, topic_id, sub_topic_id)
        )

    def GET_NORMAL_SUBJECTS(self):
        return self.dataFromAPI(Endpoints.GET_NORMAL_SUBJECTS_EP(self.batch_name))

    def GET_NORMAL_CHAPTERS(self, subject_slug):
        Global.hr()
        print(f"Batch: {self.batch_name}")
        print(f"Subject: {subject_slug}")
        Global.hr()

        return self.get_paginated_data(
            Endpoints.GET_NORMAL_CHAPTERS_EP(self.batch_name, subject_slug)
        )

    def GET_NORMAL_LECTURES(self, subject_slug, chapter_slug, simple=False):
        return self.get_paginated_data(
            Endpoints.GET_NORMAL_LECTURES_EP(self.batch_name, subject_slug, chapter_slug, simple=simple)
        )

    def get_batches_force_hard(self):
        return self.get_paginated_data(Endpoints.get_batches_force_hard())

    @staticmethod
    def to_table(list_of_data):
        """Split a list of uniform dicts into (headers, rows) for tabulation.

        Returns ([], []) on any failure (e.g. empty input or ragged dicts).
        """
        try:
            headers = list_of_data[0].keys()
            data = [list(row.values()) for row in list_of_data]
            return headers, data
        except Exception as e:
            debugger.error(f"Error: {e}")
            return [], []
|
beta/question_scraper/Endpoints.py
ADDED
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import json
|
2 |
+
|
3 |
+
from mainLogic.utils.Endpoint import Endpoint
|
4 |
+
|
5 |
+
|
6 |
+
class Endpoints:
    """Endpoint factories for the PenPencil infinite-practice question API."""

    # Base headers shared by all requests.  Handed out via .copy() so
    # per-request auth headers never leak back into this template dict.
    DEFAULT_HEADERS = {
        'accept': 'application/json, text/plain, */*',
        'accept-language': 'en-US,en;q=0.9,la;q=0.8',
        'client-id': '5eb393ee95fab7468a79d189',
        'client-type': 'WEB',
        'client-version': '300',
        'content-type': 'application/json',
        'dnt': '1',
        'origin': 'https://pw-infinite-practise.pw.live',
        'priority': 'u=1, i',
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36',

    }

    @staticmethod
    def GET_QUESTIONS_EP(
            subject_id='734chcfe1nhx5ay0lh0n5d4qj',
            chapters=[{'chapterId': 'dlfqana46gj8kjshdzwwlhb7p', 'classId': 'oyhh7ve8217so92jw81tefbyp'}],
            difficulty_level=[3],
            questions_count=90,
    ):
        """POST endpoint that starts an infinite-practice test session.

        BUG FIX: ``difficulty_level`` was accepted but ignored — the payload
        always hard-coded ``[3]``.  It is now forwarded; the default keeps
        the previous behavior.
        """
        return Endpoint(
            url='https://api.penpencil.co/v3/test-service/65d75d320531c20018ade9bb/infinitePractice/v2/start-test',
            method='POST',
            payload=json.dumps({
                "exams": [
                    "7d5erv0sihqah96p8noqgbxkp"
                ],
                "examCategory": "vckzned6mqjlkub8wsfh605rp",
                "testMode": "PRACTICE",
                "questionsCount": questions_count,
                "chapters": chapters,
                "subject": subject_id,
                "difficultyLevel": difficulty_level,
                "isReattempt": False
            }),
            # BUG FIX: pass a copy so callers mutating endpoint.headers
            # (Authorization / randomid) don't pollute DEFAULT_HEADERS.
            headers=Endpoints.DEFAULT_HEADERS.copy(),
            post_function=lambda data: data['data']
        )

    @staticmethod
    def GET_SUBJECTS_BATCH_EP(batch_id='65d75d320531c20018ade9bb'):
        """GET endpoint listing infinite-practice subjects for a batch."""
        return Endpoint(
            url=f'https://api.penpencil.co/v3/batches/{batch_id}/infinitePractice/subjects',
            method='GET',
            headers=Endpoints.DEFAULT_HEADERS.copy(),
            post_function=lambda data: data['data']
        )

    @staticmethod
    def GET_CHAPTERS_EP(batch_id='65d75d320531c20018ade9bb', subject_id='734chcfe1nhx5ay0lh0n5d4qj'):
        """GET endpoint listing chapters for a subject within a batch."""
        return Endpoint(
            url=f'https://api.penpencil.co/v3/batches/{batch_id}/infinitePractice/chapters?subjectId={subject_id}',
            method='GET',
            headers=Endpoints.DEFAULT_HEADERS.copy(),
            post_function=lambda data: data['data']
        )
|
72 |
+
|
beta/question_scraper/app.py
ADDED
@@ -0,0 +1,86 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import json
|
2 |
+
from tabnanny import verbose
|
3 |
+
|
4 |
+
from beta.question_scraper.Endpoints import Endpoints
|
5 |
+
from mainLogic.utils import glv_var
|
6 |
+
from mainLogic.utils.Endpoint import Endpoint
|
7 |
+
from mainLogic.utils.glv import Global
|
8 |
+
from mainLogic.utils.glv_var import debugger
|
9 |
+
|
10 |
+
|
11 |
+
class QuestionsAPI:
    """Client for the PenPencil infinite-practice question endpoints.

    Injects the bearer token and ``randomid`` header into each request and
    supports simple page-number pagination.
    """

    def __init__(self, token: str, random_id: str, force=True, verbose=False):
        self.token = token
        self.random_id = random_id
        self.force = force
        self.verbose = verbose

    def dataFromAPI(self, endpoint: Endpoint):
        """Fetch one endpoint, refreshing credentials from prefs when forced.

        Raises ValueError when ``force`` is set but no stored token exists.
        With no post_function, returns the JSON object on HTTP 200 and the
        response text otherwise.
        """
        if self.force:
            debugger.error("Forced to get token from stored prefs")
            try:
                self.token = glv_var.vars['prefs']['token']
                self.random_id = glv_var.vars['prefs']['random_id']
            except Exception as e:
                debugger.error(f"Error: {e}")
                self.token = None
                raise ValueError("Token not found in prefs")
            debugger.success(f"New Token: {self.token}")

        if self.token and 'Authorization' not in endpoint.headers:
            endpoint.headers['Authorization'] = f'Bearer {self.token}'
        # BUG FIX: the guard previously tested for the key 'random_id' while
        # the header actually written below is 'randomid', so the presence
        # check could never match and the header was always (re)assigned.
        if self.random_id and 'randomid' not in endpoint.headers:
            endpoint.headers['randomid'] = self.random_id

        if self.verbose:
            debugger.success(f"Headers: {endpoint.headers}")
            debugger.success(f"Payload: {endpoint.payload}")

        response_obj, status_code, response = endpoint.fetch()

        if self.verbose:
            Global.hr()
            print(f"Debugging at {endpoint.url}")
            debugger.success(f"Response: {response}")
            print(f"Response Status Code: {status_code}")
            print(f"Response Text: \n{json.dumps(response_obj)}")
            Global.hr()

        if endpoint.post_function:
            return endpoint.post_function(response_obj)
        return response_obj if status_code == 200 else response.text

    def get_paginated_data(self, endpoint: Endpoint):
        """Accumulate results page by page until an empty or error page."""
        all_data = []
        page = 1

        while True:
            paginated_endpoint = endpoint.__copy__()
            paginated_endpoint.url = paginated_endpoint.url.format(page=page)
            response = self.dataFromAPI(paginated_endpoint)

            # A plain string means a non-200 response body; stop paginating.
            if isinstance(response, str):
                break
            if not response:
                break

            all_data.extend(response)
            page += 1

            # URL without a {page} placeholder: single page only.
            if paginated_endpoint.url == endpoint.url:
                break

        return all_data

    def GET_SUBJECTS(self):
        return self.dataFromAPI(Endpoints.GET_SUBJECTS_BATCH_EP())

    def GET_CHAPTERS(self, batch_id=None, subject_id=None):
        # NOTE(review): batch_id is currently ignored; GET_CHAPTERS_EP falls
        # back to its built-in default batch id — confirm this is intended.
        return self.dataFromAPI(Endpoints.GET_CHAPTERS_EP(subject_id=subject_id))

    def GET_QUESTION(self, subject_id=None, chapters=None, difficulty_level=None, questions_count=None):
        # NOTE(review): None values are forwarded as-is, overriding the
        # endpoint factory's defaults — callers must pass real values.
        return self.dataFromAPI(Endpoints.GET_QUESTIONS_EP(subject_id=subject_id, chapters=chapters, difficulty_level=difficulty_level, questions_count=questions_count))
|
beta/shellLogic/Plugin.py
ADDED
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import shutil
|
2 |
+
import re
|
3 |
+
from beta.shellLogic import simpleParser
|
4 |
+
|
5 |
+
class Plugin:
    """
    Base class for shell command handlers.

    Subclasses populate ``commandList`` (directly or via ``add_command``)
    and then call ``register_commands`` to publish those entries into the
    shared ``global_command_list`` registry.
    """

    global_command_list = {}  # registry shared by every Plugin subclass

    def __init__(self):
        self.commandList = {}
        self.register_commands()

    def register_commands(self):
        """Publish every locally declared command into the global registry."""
        for cmd_name, cmd_info in self.commandList.items():
            Plugin.global_command_list[cmd_name] = {
                "desc": cmd_info["desc"],
                "regex": cmd_info["regex"],
                "func": cmd_info["func"],
            }

    def parseAndRun(self, command, args=[]):
        """Execute ``command`` via the global registry, if it is registered."""
        if command not in Plugin.global_command_list:
            print(f"Command '{command}' not found.")
            return
        simpleParser.parseAndRun(Plugin.global_command_list, command, args)

    def add_command(self, name, desc, regex, func):
        """Declare a command locally (publish later via register_commands)."""
        self.commandList[name] = {"desc": desc, "regex": regex, "func": func}

    def get_help(self, command=""):
        """Return help text for one command, or a table of all commands."""
        term_width = shutil.get_terminal_size().columns
        divider = '-' * term_width + '\n'
        text = "Available commands:\n"

        if command:
            if command not in Plugin.global_command_list:
                return f"Command '{command}' not found."
            return f"{command}: {Plugin.global_command_list[command]['desc']}"

        text += divider
        registry = Plugin.global_command_list
        longest = max(len(name) for name in registry) if registry else 0
        pad = longest + 4

        for cmd_name, cmd_info in registry.items():
            text += f"{cmd_name.ljust(pad)}: {cmd_info['desc']}\n"

        text += divider
        return text

    def help(self, command=""):
        """Print help text for a command or for all commands."""
        print(self.get_help(command))

    def match_command(self, command):
        """Return the first registered name whose regex matches, else None."""
        for cmd_name, cmd_info in Plugin.global_command_list.items():
            if re.match(cmd_info["regex"], command):
                return cmd_name
        return None
|
beta/shellLogic/TokenUpdate.py
ADDED
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from mainLogic.utils.glv import Global
|
2 |
+
from mainLogic.utils.glv_var import PREFS_FILE, debugger
|
3 |
+
from beta.update import UpdateJSONFile
|
4 |
+
class TokenUpdate:
    """Shell command handler that writes a new auth token into prefs."""

    def __init__(self):
        # Preferences JSON file that holds the stored token.
        self.file_path = PREFS_FILE
        self.commandList = {
            "tkn-up": {
                "func": self.update
            }
        }

    def update(self, args=[]):
        """Persist args[0] as the new token, or complain if no arg given."""
        if not args:
            debugger.error("Please provide a token to update.")
            return
        updater = UpdateJSONFile(self.file_path)
        updater.update('token', args[0])
        debugger.success("Token updated successfully.")

    def parseAndRun(self, command, args=[]):
        """Dispatch a command name to its registered handler."""
        entry = self.commandList.get(command)
        if entry is None:
            debugger.error("Command not found.")
        else:
            entry["func"](args)
|
32 |
+
|
33 |
+
|
34 |
+
|
beta/shellLogic/handleLogics/HandleBasicCMDUtils.py
ADDED
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import sys
|
2 |
+
|
3 |
+
from beta.shellLogic.Plugin import Plugin
|
4 |
+
from mainLogic.utils.os2 import SysFunc
|
5 |
+
|
6 |
+
os2 = SysFunc()
|
7 |
+
|
8 |
+
class HandleBasicCMDUtils(Plugin):
    """Built-in shell commands: clear screen, cd, raw command, exit."""

    def __init__(self):
        super().__init__()
        self.commandList = {
            "cls": {"desc": "Clear the screen", "regex": r"cls", "func": self.cls},
            "cd": {"desc": "Change directory", "regex": r"cd", "func": self.cd},
            "cmd": {"desc": "Run a command", "regex": r"cmd", "func": self.cmd},
            "exit": {"desc": "Exit the shell", "regex": r"exit", "func": self.exit_shell},
        }
        self.register_commands()

    def cls(self, args=[]):
        """Clear the terminal; echo any stray arguments afterwards."""
        os2.clear()
        if args:
            print(args)

    def exit_shell(self, args=[]):
        """Terminate the shell process."""
        sys.exit(0)

    def cd(self, args=[]):
        """Change working directory via os2.cd (no-arg form uses its default)."""
        if args:
            os2.cd(args[0])
        else:
            os2.cd()

    def cmd(self, args=[]):
        """Run the joined args through the system shell.

        NOTE(review): args reach os.system unescaped — acceptable for an
        interactive helper, unsafe for untrusted input.
        """
        import os
        os.system(" ".join(args))
|
beta/shellLogic/handleLogics/HandleBatch.py
ADDED
@@ -0,0 +1,177 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import argparse
|
2 |
+
from beta.obsolete.batch_scraper.app import BatchAPI
|
3 |
+
from beta.shellLogic.Plugin import Plugin
|
4 |
+
from mainLogic.utils.dependency_checker import re_check_dependencies
|
5 |
+
from mainLogic.utils.glv_var import debugger
|
6 |
+
|
7 |
+
|
8 |
+
class HandleBatch(Plugin):
    """Shell plugin that dumps batch/khazana hierarchy data as CSV."""

    def __init__(self):
        super().__init__()
        self.add_command("bgen", "Generate entire batch data", "--generate", self.generate)
        self.register_commands()

    def generate(self, args):
        """Walk the batch hierarchy selected by CLI-style ``args`` and dump CSV.

        Khazana flow:
            khazana[batch] ---> super-subject[batch] ---> batch ---> chapter ---> lectures
        In that flow --batch-slug is reused as the *super subject* slug and
        --subject-slug as the batch slug.
        """
        parser = argparse.ArgumentParser(description="Generate batch data")
        parser.add_argument("--khazana", "-K", type=str, default="")
        parser.add_argument("--batch-slug", "-B", type=str, default="")
        parser.add_argument("--subject-slug", "-S", type=str, default="")
        parser.add_argument("--chapter-slug", "-C", type=str, default="")
        parser.add_argument("--export", "-E", type=str, default="")
        parser.add_argument("--simple", action="store_true", default=False)
        parser.add_argument("--recursive", action="store_true", default=False)

        opts = parser.parse_args(args)

        state, prefs = re_check_dependencies(reload=True)
        token = prefs['token']

        def to_csv_str(data: list):
            # Render a list of uniform dicts as CSV text.
            # BUG FIX: guard against an empty list — previously data[0]
            # raised IndexError when an API call returned no rows (reachable
            # via the super-subjects dump below).
            if not data:
                return ''
            headers = data[0].keys()
            rows = [list(item.values()) for item in data]
            output = []
            output.append(','.join(headers))
            for row in rows:
                output.append(','.join(map(str, row)))
            return '\n'.join(output)

        def get_khazana_data(khazana_name):
            # Fetch and log the subject list of a khazana program.
            api = BatchAPI(khazana_name, token, force=False, verbose=False)
            res = api.GET_KHAZANA_SUBJECTS()
            if not res:
                debugger.error(f"Batch {khazana_name} not found.")
                return
            else:
                debugger.info(f"Batch {khazana_name} found.")
                debugger.info(f"Batch data:\n{to_csv_str(res)}")
                return res

        # TO NOTE
        # In the khazana flow, we use GET_KHAZANA_BATCHES when we actually want to get SUBJECTS.
        # PREVIOUSLY we called them BATCHES as each khazana is a super class of subjects with batches inside of each subject.

        # A SUPER SUBJECT is a collection of teachers teaching the same subject in different batches.
        # data inside super subject
        def KHAZANA_GET_SUPER_SUBJECT_DATA(khazana_name, super_subject_name):
            api = BatchAPI(khazana_name, token, force=False, verbose=False)

            res = api.GET_KHAZANA_BATCHES(super_subject_name)

            # if the super subject does not exist
            if not res:
                debugger.error(f"Super subject {super_subject_name} not found.")
                return
            else:
                debugger.info(f"Super subject {super_subject_name} found.")
                debugger.success(f"Super subject data:\n{to_csv_str(res)}")
                return res

        def KHAZANA_SUPER_SUBJECT_BATCH_DATA(khazana_name, super_subject_name, batch_name):
            api = BatchAPI(khazana_name, token, force=False, verbose=False)

            res = api.GET_KHAZANA_CHAPTERS(batch_name)

            # if batch does not exist
            if not res:
                debugger.error(f"Batch {batch_name} not found.")
                return
            else:
                debugger.info(f"Batch {batch_name} found.")
                debugger.success(f"Batch data:\n{to_csv_str(res)}")
                return res

        def KHAZANA_BATCH_CHAPTER_DATA(khazana_name, super_subject_name, batch_name, chapter_name):
            api = BatchAPI(khazana_name, token, force=False, verbose=False)

            # NOTE(review): chapter_name is passed as both the chapter slug
            # and the topic id — confirm this is intentional.
            res = api.GET_KHAZANA_LECTURES(batch_name, chapter_name, chapter_name)

            # if chapter does not exist
            if not res:
                debugger.error(f"Chapter {chapter_name} not found.")
                return
            else:
                debugger.info(f"Chapter {chapter_name} found.")
                debugger.success(f"Chapter data:\n{to_csv_str(res)}")
                return res

        # khazana flow
        """
        # khazana[batch] ---> super-subject[batch] ---> batch ---> chapter ---> lectures
        """

        if opts.khazana:
            api = BatchAPI(opts.khazana, token, force=False, verbose=False)

            if opts.khazana and opts.batch_slug and opts.subject_slug and opts.chapter_slug:
                # khazana[batch] ---> super-subject[batch] ---> batch ---> chapter ---> lectures
                debugger.info(f"Generating data for khazana: {opts.khazana}, super_subject: {opts.batch_slug}, batch: {opts.subject_slug}, chapter: {opts.chapter_slug}")
                chapter = KHAZANA_BATCH_CHAPTER_DATA(opts.khazana, opts.batch_slug, opts.subject_slug, opts.chapter_slug)

                if not chapter:
                    debugger.error(f"Chapter {opts.chapter_slug} not found.")
                    return
                else:
                    debugger.info(f"Chapter data:\n{to_csv_str(chapter)}")
                    return chapter

            if opts.batch_slug and opts.subject_slug:
                batch = KHAZANA_SUPER_SUBJECT_BATCH_DATA(opts.khazana, opts.batch_slug, opts.subject_slug)

                if not batch:
                    debugger.error(f"Batch {opts.subject_slug} not found.")
                    return
                else:
                    debugger.info(f"Batch data:\n{to_csv_str(batch)}")
                    return batch

            if opts.batch_slug:
                # Batch slug is actually used here for the super subject:
                # khazana[batch] ---> super-subject[batch]
                debugger.info(f"Generating data for super_subject: {opts.batch_slug}")
                super_subject = KHAZANA_GET_SUPER_SUBJECT_DATA(opts.khazana, opts.batch_slug)

                if not super_subject:
                    debugger.error(f"Super subject {opts.batch_slug} not found.")
                    return
                else:
                    debugger.info(f"Super subject data:\n{to_csv_str(super_subject)}")
                    return super_subject

            debugger.info(f"Generating super-subjects for khazana batch: {opts.khazana}")
            super_subjects = api.GET_KHAZANA_SUBJECTS()
            debugger.info(f"Got this data for super-subjects for khazana batch: {opts.khazana}")
            debugger.success(f"{to_csv_str(super_subjects)}")

        else:
            # normal flow
            # NOTE(review): this branch looks unfinished — api is created but
            # never used afterwards.
            api = BatchAPI(opts.batch_slug, token, force=False, verbose=False)
|
175 |
+
|
176 |
+
|
177 |
+
|
beta/shellLogic/handleLogics/HandleHell.py
ADDED
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from beta.shellLogic.Plugin import Plugin
|
2 |
+
|
3 |
+
|
4 |
+
class HandleHell(Plugin):
    """Demo plugin providing the throwaway 'hell' test command."""

    def __init__(self):
        super().__init__()
        self.add_command("hell", "Hell with World!", r'hell', self.hell)
        self.register_commands()

    def hell(self, args):
        """Print the demo greeting together with any passed args."""
        print("Hell with World! ", args)
|
beta/shellLogic/handleLogics/HandleKeyAndAvailiblity.py
ADDED
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from beta.shellLogic.Plugin import Plugin
|
2 |
+
from mainLogic.big4.Ravenclaw_decrypt.key import LicenseKeyFetcher
|
3 |
+
from mainLogic.utils import glv_var
|
4 |
+
from mainLogic.startup.checkup import CheckState
|
5 |
+
|
6 |
+
class HandleKeyAndAvailability(Plugin):
    """Commands for fetching license keys and probing their availability."""

    def __init__(self):
        super().__init__()
        ch = CheckState()
        # Load prefs once; token/random_id feed the license key fetcher.
        self.prefs = ch.checkup(glv_var.EXECUTABLES, verbose=False)['prefs']
        self.token = self.prefs['token']
        self.random_id = self.prefs['random_id']
        self.lkf = LicenseKeyFetcher(self.token, self.random_id)
        self.commandList = {
            "get_key": {
                "desc": "Retrieve a license key",
                "regex": r"(get_key|key)",
                "func": self.get_key,
            },
            "check": {
                "desc": "Check availability of the license key",
                "regex": r"check",
                "func": self.check,
            },
        }
        self.register_commands()

    def get_key(self, args=[]):
        """Fetch the license key identified by args[0]."""
        if not args:
            print("Please provide a key to get")
            return
        self.lkf.get_key(args[0])

    def check(self, args=[]):
        """Report whether a license key can be fetched for args[0]."""
        print("Checking the availability of the key...")
        if not args:
            print("Please provide a key to check")
            return
        if self.lkf.get_key(args[0], verbose=False):
            print("Key is available")
        else:
            print("Key is not available")
|
beta/shellLogic/handleLogics/HandleQuestions.py
ADDED
@@ -0,0 +1,117 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import json
|
2 |
+
from urllib.parse import unquote
|
3 |
+
|
4 |
+
from jinja2.utils import url_quote
|
5 |
+
from tabulate import tabulate
|
6 |
+
from bs4 import BeautifulSoup # Import BeautifulSoup for basic HTML parsing
|
7 |
+
|
8 |
+
from beta.question_scraper.app import QuestionsAPI
|
9 |
+
from beta.shellLogic.Plugin import Plugin
|
10 |
+
from mainLogic.utils.dependency_checker import re_check_dependencies
|
11 |
+
from mainLogic.utils.glv import Global
|
12 |
+
|
13 |
+
import textwrap
|
14 |
+
|
15 |
+
|
16 |
+
def wrap_text(text, width=50):
    """Wrap ``text`` at ``width`` columns, lines joined with newlines.

    textwrap.fill is the documented equivalent of
    '\\n'.join(textwrap.wrap(...)).
    """
    return textwrap.fill(text, width)
|
18 |
+
|
19 |
+
|
20 |
+
def parse_html(html_content):
    """Convert basic HTML markup to plain text, elements separated by newlines."""
    parsed = BeautifulSoup(html_content, 'html.parser')
    return parsed.get_text(separator='\n')
|
24 |
+
|
25 |
+
|
26 |
+
class HandleQuestions(Plugin):
|
27 |
+
|
28 |
+
def __init__(self):
|
29 |
+
super().__init__()
|
30 |
+
self.add_command("ques", "Get questions from the API", r'ques', self.ques)
|
31 |
+
self.register_commands()
|
32 |
+
|
33 |
+
def ques(self, args):
|
34 |
+
|
35 |
+
import argparse
|
36 |
+
parser = argparse.ArgumentParser(description='Get questions from the API')
|
37 |
+
parser.add_argument('-B', '--batch', help='Batch ID', required=False)
|
38 |
+
parser.add_argument('-S', '--subject', help='Subject ID', required=False)
|
39 |
+
parser.add_argument('-C', '--chapter', help='Chapter ID', required=False)
|
40 |
+
|
41 |
+
args = parser.parse_args(args)
|
42 |
+
|
43 |
+
if args.batch:
|
44 |
+
batch_id = args.batch
|
45 |
+
else:
|
46 |
+
batch_id = None
|
47 |
+
|
48 |
+
if args.subject:
|
49 |
+
subject_id = args.subject
|
50 |
+
else:
|
51 |
+
subject_id = None
|
52 |
+
|
53 |
+
if args.chapter:
|
54 |
+
chapter_id = args.chapter
|
55 |
+
else:
|
56 |
+
chapter_id = None
|
57 |
+
|
58 |
+
state, prefs = re_check_dependencies()
|
59 |
+
|
60 |
+
token = prefs['token']
|
61 |
+
random_id = prefs['random_id']
|
62 |
+
|
63 |
+
qApi = QuestionsAPI(token, random_id, force=False, verbose=False)
|
64 |
+
|
65 |
+
# Get the questions
|
66 |
+
subjects_dat = qApi.GET_SUBJECTS()
|
67 |
+
|
68 |
+
p_Dat = []
|
69 |
+
headers = ['Subject Name', 'Subject ID']
|
70 |
+
|
71 |
+
for subject in subjects_dat['subjects']:
|
72 |
+
p_Dat.append([subject['englishName'], subject['subjectId']])
|
73 |
+
|
74 |
+
Global.hr()
|
75 |
+
debugger.success(f"Exams: {subjects_dat['exams']}")
|
76 |
+
debugger.success(f"ExamCategorie: {subjects_dat['examCategory']}")
|
77 |
+
Global.hr()
|
78 |
+
|
79 |
+
if not subject_id:
|
80 |
+
print(tabulate(p_Dat, headers=headers, tablefmt='grid'))
|
81 |
+
|
82 |
+
if subject_id and not chapter_id:
|
83 |
+
debugger.success(f"Getting chapters for subject {subject_id}")
|
84 |
+
chapters_dat = qApi.GET_CHAPTERS(subject_id=subject_id)
|
85 |
+
|
86 |
+
headers = ['Chapter Name', 'Chapter ID', 'Class ID', 'Easy', 'Medium', 'Hard']
|
87 |
+
|
88 |
+
s_dat = []
|
89 |
+
for chapter in chapters_dat:
|
90 |
+
s_dat.append(
|
91 |
+
[chapter['englishName'], chapter['chapterId'], chapter['classId'], chapter['questionCountEasy'],
|
92 |
+
chapter['questionCountMedium'], chapter['questionCountHard']])
|
93 |
+
|
94 |
+
Global.hr()
|
95 |
+
print(tabulate(s_dat, headers=headers, tablefmt='grid'))
|
96 |
+
Global.hr()
|
97 |
+
|
98 |
+
if chapter_id and subject_id:
|
99 |
+
debugger.success(f"Getting questions for subject {subject_id} and chapter {chapter_id}")
|
100 |
+
questions_dat = qApi.GET_QUESTION(subject_id=subject_id, chapters=[
|
101 |
+
{'chapterId': chapter_id, 'classId': 'oyhh7ve8217so92jw81tefbyp'}], difficulty_level=[3],
|
102 |
+
questions_count=90)
|
103 |
+
|
104 |
+
headers = ['Question ID', 'Question Text', 'Contents', 'Options', 'Type']
|
105 |
+
|
106 |
+
s_dat = []
|
107 |
+
for question in questions_dat['questions']:
|
108 |
+
question_text = wrap_text(parse_html(str(question['plainQuestionText'])), 50)
|
109 |
+
options_text = "\n".join([wrap_text(parse_html(f"* {option['text']}"), 80) for option in question['options']])
|
110 |
+
contents = wrap_text(parse_html(str(question['content'])), 50) if 'content' in question else ''
|
111 |
+
s_dat.append(
|
112 |
+
[question['questionId'], unquote(question_text), unquote(contents), unquote(options_text), question['type']]
|
113 |
+
)
|
114 |
+
|
115 |
+
Global.hr()
|
116 |
+
print(tabulate(s_dat, headers=headers, tablefmt='grid'))
|
117 |
+
Global.hr()
|
beta/shellLogic/handleLogics/HandleShellDL.py
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from beta.shellLogic.Plugin import Plugin
|
2 |
+
from mainLogic.big4.Ravenclaw_decrypt.key import LicenseKeyFetcher
|
3 |
+
from mainLogic.big4.obsolete.Obsolete_Gryffindor_downloadv2 import Download
|
4 |
+
from mainLogic.startup.checkup import CheckState
|
5 |
+
from mainLogic.utils import glv_var
|
6 |
+
from mainLogic import downloader
|
7 |
+
|
8 |
+
|
9 |
+
class HandleShellDL(Plugin):
|
10 |
+
def __init__(self):
|
11 |
+
super().__init__()
|
12 |
+
self.commandList = {
|
13 |
+
"edl": {
|
14 |
+
"desc": "Enhanced download with name and ID",
|
15 |
+
"regex": r"edl",
|
16 |
+
"func": self.edownload
|
17 |
+
},
|
18 |
+
"dl": {
|
19 |
+
"desc": "Download with name and ID",
|
20 |
+
"regex": r"dl",
|
21 |
+
"func": self.download
|
22 |
+
}
|
23 |
+
}
|
24 |
+
self.register_commands()
|
25 |
+
|
26 |
+
def edownload(self, args=[]):
|
27 |
+
"""
|
28 |
+
Performs an enhanced download using the provided name and ID.
|
29 |
+
"""
|
30 |
+
if not args or len(args) < 2:
|
31 |
+
print("Please provide a name and id")
|
32 |
+
return
|
33 |
+
|
34 |
+
name = args[0]
|
35 |
+
id = args[1]
|
36 |
+
|
37 |
+
ch = CheckState()
|
38 |
+
state = ch.checkup(glv_var.EXECUTABLES, verbose=False)
|
39 |
+
prefs = state['prefs']
|
40 |
+
|
41 |
+
token = prefs['token']
|
42 |
+
random_id = prefs['random_id']
|
43 |
+
|
44 |
+
fetcher = LicenseKeyFetcher(token, random_id)
|
45 |
+
fetcher.get_key(id)
|
46 |
+
|
47 |
+
url = fetcher.url
|
48 |
+
cookies = fetcher.cookies
|
49 |
+
|
50 |
+
|
51 |
+
Download(
|
52 |
+
vsd_path=prefs['vsd'],
|
53 |
+
url=url,
|
54 |
+
name=name,
|
55 |
+
cookie=cookies,
|
56 |
+
tmp_path=prefs['tmpDir'],
|
57 |
+
output_path=prefs['dir'],
|
58 |
+
).download()
|
59 |
+
|
60 |
+
def download(self, args=[]):
|
61 |
+
"""
|
62 |
+
Performs a basic download using the provided name and ID.
|
63 |
+
"""
|
64 |
+
if not args or len(args) < 2:
|
65 |
+
print("Please provide a name and id")
|
66 |
+
return
|
67 |
+
|
68 |
+
name = args[0]
|
69 |
+
id = args[1]
|
70 |
+
|
71 |
+
downloader.main(
|
72 |
+
id=id,
|
73 |
+
name=name,
|
74 |
+
)
|
beta/shellLogic/handleLogics/HandleWEB.py
ADDED
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from beta.shellLogic.Plugin import Plugin
|
2 |
+
|
3 |
+
|
4 |
+
class HandleWEB(Plugin):
|
5 |
+
|
6 |
+
def __init__(self):
|
7 |
+
super().__init__()
|
8 |
+
self.add_command("web", "Open the web interface", "web", self.web)
|
9 |
+
self.register_commands()
|
10 |
+
|
11 |
+
|
12 |
+
def web(self, args):
|
13 |
+
# Open the web interface
|
14 |
+
print("Opening the web interface...")
|
beta/shellLogic/logic.py
ADDED
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from beta.shellLogic.Plugin import Plugin
|
2 |
+
from beta.shellLogic.TokenUpdate import TokenUpdate
|
3 |
+
from beta.shellLogic.handleLogics.HandleBasicCMDUtils import HandleBasicCMDUtils
|
4 |
+
from beta.shellLogic.handleLogics.HandleBatch import KhazanaHandler
|
5 |
+
from beta.shellLogic.handleLogics.HandleHell import HandleHell
|
6 |
+
from beta.shellLogic.handleLogics.HandleKeyAndAvailiblity import HandleKeyAndAvailability
|
7 |
+
#from beta.shellLogic.handleLogics.HandleQuestions import HandleQuestions
|
8 |
+
from beta.shellLogic.handleLogics.HandleShellDL import HandleShellDL
|
9 |
+
from beta.shellLogic.handleLogics.HandleWEB import HandleWEB
|
10 |
+
|
11 |
+
# Instantiate the command handlers (automatically registers commands)
|
12 |
+
basic_cmd_utils = HandleBasicCMDUtils()
|
13 |
+
key_utils = HandleKeyAndAvailability()
|
14 |
+
dl_utils = HandleShellDL()
|
15 |
+
token_update = TokenUpdate()
|
16 |
+
k_batch = KhazanaHandler()
|
17 |
+
webui = HandleWEB()
|
18 |
+
#ques = HandleQuestions()
|
19 |
+
hell = HandleHell()
|
20 |
+
|
21 |
+
def execute_help(command, args=[]):
|
22 |
+
Plugin().help(command)
|
23 |
+
|
24 |
+
def execute(command, args=[]):
|
25 |
+
Plugin().parseAndRun(command, args)
|
beta/shellLogic/logicError.py
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
class commandNotFound(Exception):
|
2 |
+
def __init__(self, command):
|
3 |
+
self.command = command
|
4 |
+
|
5 |
+
def __str__(self):
|
6 |
+
return f"Command '{self.command}' not found"
|
beta/shellLogic/shell.py
ADDED
@@ -0,0 +1,76 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from prompt_toolkit import PromptSession
|
2 |
+
from mainLogic.utils.glv import Global
|
3 |
+
from mainLogic.startup.checkup import CheckState
|
4 |
+
import json
|
5 |
+
|
6 |
+
from mainLogic.utils.glv_var import EXECUTABLES
|
7 |
+
|
8 |
+
|
9 |
+
|
10 |
+
from prompt_toolkit.completion import Completer, Completion
|
11 |
+
from prompt_toolkit.completion.filesystem import PathCompleter
|
12 |
+
from prompt_toolkit.document import Document
|
13 |
+
|
14 |
+
class CustomCompleter(Completer):
|
15 |
+
def __init__(self):
|
16 |
+
self.file_completer = PathCompleter()
|
17 |
+
|
18 |
+
def get_completions(self, document: Document, complete_event):
|
19 |
+
text = document.text_before_cursor
|
20 |
+
if text.startswith('cd '):
|
21 |
+
for completion in self.file_completer.get_completions(document, complete_event):
|
22 |
+
yield completion
|
23 |
+
|
24 |
+
def main(command_list=[]):
|
25 |
+
# Initialize Prompt Toolkit session
|
26 |
+
session = PromptSession()
|
27 |
+
|
28 |
+
# Perform checkup and get preferences
|
29 |
+
# Hardcoded verbose to False
|
30 |
+
state = CheckState().checkup(EXECUTABLES, './', verbose=False)
|
31 |
+
prefs = state['prefs']
|
32 |
+
|
33 |
+
# Convert preferences to JSON string for display
|
34 |
+
prefs_json = json.dumps(prefs, indent=4)
|
35 |
+
|
36 |
+
# Add a custom completer
|
37 |
+
custom_completer = CustomCompleter()
|
38 |
+
|
39 |
+
from beta.shellLogic import logic
|
40 |
+
|
41 |
+
if command_list:
|
42 |
+
if len(command_list) == 1:
|
43 |
+
command = command_list[0]
|
44 |
+
args = []
|
45 |
+
else:
|
46 |
+
command = command_list[0]
|
47 |
+
args = command_list[1:]
|
48 |
+
|
49 |
+
logic.execute(command, args)
|
50 |
+
|
51 |
+
|
52 |
+
# Command-line interface loop
|
53 |
+
while True:
|
54 |
+
try:
|
55 |
+
user_input = session.prompt('|pwdl> ', completer=custom_completer)
|
56 |
+
|
57 |
+
# just in case the user hits enter without typing anything
|
58 |
+
if not user_input: continue
|
59 |
+
|
60 |
+
command = user_input.split()[0]
|
61 |
+
args = user_input.split()[1:]
|
62 |
+
if not args: args = []
|
63 |
+
|
64 |
+
# check if first arg is /? or -h or --help
|
65 |
+
if args and args[0] in ['/?', '-h', '--help']:
|
66 |
+
logic.execute_help(command)
|
67 |
+
else:
|
68 |
+
logic.execute(command, args)
|
69 |
+
|
70 |
+
except KeyboardInterrupt:
|
71 |
+
continue
|
72 |
+
except EOFError:
|
73 |
+
break
|
74 |
+
|
75 |
+
if __name__ == "__main__":
|
76 |
+
main()
|
beta/shellLogic/shell_var.py
ADDED
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from prompt_toolkit.key_binding import KeyBindings
|
2 |
+
from prompt_toolkit.layout import Window, Layout
|
3 |
+
from prompt_toolkit import Application
|
4 |
+
from prompt_toolkit.buffer import Buffer
|
5 |
+
from prompt_toolkit.layout.controls import BufferControl
|
6 |
+
from prompt_toolkit.layout.processors import Processor, BeforeInput
|
7 |
+
|
8 |
+
from beta.shellLogic.Plugin import Plugin
|
9 |
+
from beta.shellLogic.handleLogics.HandleWEB import HandleWEB
|
10 |
+
from beta.shellLogic.handleLogics.HandleBatch import HandleBatch
|
11 |
+
from beta.shellLogic.logic import batch
|
12 |
+
|
13 |
+
# Initialize the plugin and add commands
|
14 |
+
plugin = HandleWEB()
|
15 |
+
batch_h = HandleBatch()
|
16 |
+
|
17 |
+
prompt = "|pwdl> "
|
18 |
+
|
19 |
+
|
20 |
+
def on_input_processed(buffer):
|
21 |
+
if buffer.text.endswith("\n"):
|
22 |
+
command = buffer.text.strip()
|
23 |
+
if command:
|
24 |
+
command = command.split()[0]
|
25 |
+
args = command.split()[1:]
|
26 |
+
if not args:
|
27 |
+
args = []
|
28 |
+
Plugin().parseAndRun(command, args)
|
29 |
+
|
30 |
+
|
31 |
+
|
32 |
+
|
33 |
+
|
34 |
+
|
35 |
+
|
36 |
+
input_buffer = Buffer(on_text_insert=on_input_processed)
|
37 |
+
control = BufferControl(buffer=input_buffer,input_processors=[BeforeInput(lambda: prompt)])
|
38 |
+
|
39 |
+
|
40 |
+
|
41 |
+
kb = KeyBindings()
|
42 |
+
|
43 |
+
@kb.add('c-q')
|
44 |
+
def exit_(event):
|
45 |
+
event.app.exit()
|
46 |
+
|
47 |
+
app = Application(
|
48 |
+
key_bindings=kb,
|
49 |
+
layout=Layout(
|
50 |
+
Window(
|
51 |
+
content=control,
|
52 |
+
)),
|
53 |
+
full_screen=True
|
54 |
+
)
|
55 |
+
|
56 |
+
if __name__ == "__main__":
|
57 |
+
app.run()
|
beta/shellLogic/simpleParser.py
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
def parseAndRun(commandlist,command,args=[],obj=None):
|
2 |
+
if command in commandlist: func = commandlist[command]["func"]
|
3 |
+
|
4 |
+
if not func: return
|
5 |
+
|
6 |
+
func(args)
|
beta/update.py
ADDED
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import json
|
2 |
+
|
3 |
+
from mainLogic.utils.glv_var import debugger
|
4 |
+
|
5 |
+
|
6 |
+
class UpdateJSONFile:
|
7 |
+
def __init__(self, file_path, debug=False):
|
8 |
+
self.file_path = file_path
|
9 |
+
self.data = None
|
10 |
+
self.load()
|
11 |
+
|
12 |
+
if debug:
|
13 |
+
debugger.info(f"Debug Mode: Loaded data from {file_path}")
|
14 |
+
debugger.warning(f"Debug Mode: Data: {self.data}")
|
15 |
+
|
16 |
+
def load(self):
|
17 |
+
with open(self.file_path, 'r') as file:
|
18 |
+
self.data = json.load(file)
|
19 |
+
|
20 |
+
def save(self):
|
21 |
+
with open(self.file_path, 'w+') as file:
|
22 |
+
file.write(json.dumps(self.data, indent=4))
|
23 |
+
|
24 |
+
# manually check if the file is saved correctly
|
25 |
+
with open(self.file_path, 'r') as file:
|
26 |
+
saved_data = json.load(file)
|
27 |
+
if saved_data != self.data:
|
28 |
+
debugger.error("Error: Data not saved correctly.")
|
29 |
+
else:
|
30 |
+
debugger.info("Data saved correctly.")
|
31 |
+
|
32 |
+
def update(self, key, value, debug=False):
|
33 |
+
|
34 |
+
if debug:
|
35 |
+
print(f"Debug Mode: Updating {key} to {value}")
|
36 |
+
|
37 |
+
self.data[key] = value
|
38 |
+
self.save()
|
beta/util.py
ADDED
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import json
|
2 |
+
import re
|
3 |
+
|
4 |
+
def extract_uuid(text):
|
5 |
+
"""
|
6 |
+
Extracts UUIDs from a string using a regular expression.
|
7 |
+
|
8 |
+
Args:
|
9 |
+
text: The string to search for UUIDs.
|
10 |
+
|
11 |
+
Returns:
|
12 |
+
A list of extracted UUIDs, or an empty list if none are found.
|
13 |
+
"""
|
14 |
+
pattern = r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}"
|
15 |
+
matches = re.findall(pattern, text)
|
16 |
+
return matches
|
17 |
+
|
18 |
+
def generate_safe_file_name(name):
|
19 |
+
"""
|
20 |
+
Generates a safe file name by replacing spaces with underscores and removing special characters.
|
21 |
+
|
22 |
+
Args:
|
23 |
+
name: The original name to be converted.
|
24 |
+
|
25 |
+
Returns:
|
26 |
+
A string that is a safe file name.
|
27 |
+
"""
|
28 |
+
# Replace spaces with underscores and remove special characters
|
29 |
+
safe_name = re.sub(r'[^\w\s-]', '', name).strip().replace(' ', '_')
|
30 |
+
return safe_name
|
defaults.json
ADDED
@@ -0,0 +1,89 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"cloudfront_id": "d1d34p8vz63oiq",
|
3 |
+
"patched": false,
|
4 |
+
"user_id": "rumouroid",
|
5 |
+
"user_update_index": 10,
|
6 |
+
"os-info": "linux",
|
7 |
+
"tmpDir": "/tmp",
|
8 |
+
"verbose": false,
|
9 |
+
"vsd": "$script/bin/vsd",
|
10 |
+
"ffmpeg": "",
|
11 |
+
"mp4decrypt": "$script/bin/mp4decrypt",
|
12 |
+
"webui-del-time": 45,
|
13 |
+
"webui": true,
|
14 |
+
"webui-port": "5000",
|
15 |
+
"token": {
|
16 |
+
"access_token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE3NDk5MjUwNzYuNTQsImRhdGEiOnsiX2lkIjoiNjQzMTc1NDI0MGU5NzkwMDE4MDAyMDJiIiwidXNlcm5hbWUiOiI5NDcyNTA3MTAwIiwiZmlyc3ROYW1lIjoiQWtzaGl0IFNodWJoYW0iLCJsYXN0TmFtZSI6IiIsIm9yZ2FuaXphdGlvbiI6eyJfaWQiOiI1ZWIzOTNlZTk1ZmFiNzQ2OGE3OWQxODkiLCJ3ZWJzaXRlIjoicGh5c2ljc3dhbGxhaC5jb20iLCJuYW1lIjoiUGh5c2ljc3dhbGxhaCJ9LCJlbWFpbCI6ImFrc2hpdHNodWJoYW1tYXNAZ21haWwuY29tIiwicm9sZXMiOlsiNWIyN2JkOTY1ODQyZjk1MGE3NzhjNmVmIl0sImNvdW50cnlHcm91cCI6IklOIiwidHlwZSI6IlVTRVIifSwiaWF0IjoxNzQ5MzIwMjc2fQ.lAO_e1nmZJFrsxng92AxhHIKXXvzTZ02uAviST5J6jA",
|
17 |
+
"refresh_token": "c6e85e3952dc358537be22669e5c46cb1463c071dc85cb9b99a0c1e4d895d82f",
|
18 |
+
"expires_in": 1749925076540,
|
19 |
+
"tokenId": "684482547cd974e07b99536b",
|
20 |
+
"user": {
|
21 |
+
"status": "Active",
|
22 |
+
"id": "6431754240e979001800202b",
|
23 |
+
"firstName": "Akshit Shubham",
|
24 |
+
"lastName": "",
|
25 |
+
"primaryNumber": "9472507100",
|
26 |
+
"countryCode": "+91",
|
27 |
+
"countryGroup": "IN",
|
28 |
+
"username": "9472507100",
|
29 |
+
"uniqueCode": "9472EYDU",
|
30 |
+
"email": "[email protected]",
|
31 |
+
"isVerifiedEmail": false,
|
32 |
+
"created": "2023-04-08T14:08:02.575Z",
|
33 |
+
"createdAt": "2023-04-08T14:08:02.575Z",
|
34 |
+
"profileId": {
|
35 |
+
"exams": [
|
36 |
+
"NEET"
|
37 |
+
],
|
38 |
+
"_id": "6431754240e979001800202d",
|
39 |
+
"created": "2023-04-08T14:08:02.590Z",
|
40 |
+
"createdAt": "2023-04-08T14:08:02.590Z",
|
41 |
+
"class": "11",
|
42 |
+
"programId": "60910f9463f1f5004815a4c0",
|
43 |
+
"wallet": 0,
|
44 |
+
"totalRewards": 0,
|
45 |
+
"coins": {
|
46 |
+
"bankTransferCoins": 0,
|
47 |
+
"voucherCoins": 0,
|
48 |
+
"commonCoins": 0,
|
49 |
+
"walletCoins": 0,
|
50 |
+
"totalCoins": 0,
|
51 |
+
"redeemedCoins": 0
|
52 |
+
},
|
53 |
+
"parentDetails": {},
|
54 |
+
"isProfileCompleted": true,
|
55 |
+
"children": [],
|
56 |
+
"cohortId": "634fd1c40c274c00183c7579",
|
57 |
+
"needCohortUpdate": false,
|
58 |
+
"cohortUpdatedAt": "2023-04-15T09:28:16.616Z"
|
59 |
+
},
|
60 |
+
"dateOfBirth": null,
|
61 |
+
"phones": [],
|
62 |
+
"address": null,
|
63 |
+
"primary": null,
|
64 |
+
"organization": {
|
65 |
+
"_id": "5eb393ee95fab7468a79d189",
|
66 |
+
"status": "Active",
|
67 |
+
"usersCount": 0,
|
68 |
+
"name": "Physicswallah",
|
69 |
+
"website": "physicswallah.com",
|
70 |
+
"createdAt": "2020-05-07T04:51:58.030Z",
|
71 |
+
"updatedAt": "2020-05-07T04:51:58.030Z",
|
72 |
+
"phones": [],
|
73 |
+
"rejectionComments": [],
|
74 |
+
"enrolmentType": "Client",
|
75 |
+
"primaryContact": "5eb393ee95fab7468a79d18b"
|
76 |
+
},
|
77 |
+
"roles": [
|
78 |
+
{
|
79 |
+
"id": "5b27bd965842f950a778c6ef",
|
80 |
+
"name": "Student"
|
81 |
+
}
|
82 |
+
],
|
83 |
+
"androidVersion": 165,
|
84 |
+
"iosVersion": 0,
|
85 |
+
"webVersion": 1925,
|
86 |
+
"isScholar": false
|
87 |
+
}
|
88 |
+
}
|
89 |
+
}
|