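# Django views for the web-scrape API: comic search/listing, comic details,
# and cover image retrieval backed by an on-disk cache under storage/covers.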
import json, environ, requests, os, subprocess
import asyncio, uuid, shutil
from django.http import HttpResponse, JsonResponse, HttpResponseBadRequest, StreamingHttpResponse
from django_ratelimit.decorators import ratelimit
from django.views.decorators.csrf import csrf_exempt
from asgiref.sync import sync_to_async
from backend.module import web_scrap
from backend.module.utils import manage_image
from backend.models.model_cache import RequestCache
from core.settings import BASE_DIR
from backend.module.utils import cloudflare_turnstile
from backend.models.model_1 import WebscrapeGetCoverCache
from backend.module.utils import directory_info, date_utils
env = environ.Env()
STORAGE_DIR = os.path.join(BASE_DIR,"storage")
if not os.path.exists(STORAGE_DIR): os.makedirs(STORAGE_DIR)
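# get_list: POST endpoint (rate-limited to 20 requests/min per IP) that expects a JSON body
# with "search", "page" and "source". A Cloudflare Turnstile token must be supplied via the
# X-Cloudflare-Turnstile-Token header. When search text is present it queries the selected
# source's search scraper; otherwise it falls back to the "colamanga" listing.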
@csrf_exempt
@ratelimit(key='ip', rate='20/m')
def get_list(request):
    if request.method != "POST":
        return HttpResponseBadRequest('Only POST requests are allowed!', status=400)
    token = request.META.get('HTTP_X_CLOUDFLARE_TURNSTILE_TOKEN')
    if not cloudflare_turnstile.check(token):
        return HttpResponseBadRequest('Cloudflare Turnstile token is missing or has expired!', status=511)
    payload = json.loads(request.body)
    search = payload.get("search")
    page = payload.get("page")
    source = payload.get("source")
    if search.get("text"):
        DATA = web_scrap.source_control[source].search.scrap(search=search, page=page)
    else:
        DATA = web_scrap.source_control["colamanga"].get_list.scrap(page=page)
    return JsonResponse({"data": DATA})
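# get: POST endpoint (20 requests/min per IP) that returns the detail data for a single
# comic "id" from the requested "source"; scraper errors are reported as a 500 response.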
@csrf_exempt
@ratelimit(key='ip', rate='20/m')
def get(request):
    if request.method != "POST":
        return HttpResponseBadRequest('Only POST requests are allowed!', status=400)
    token = request.META.get('HTTP_X_CLOUDFLARE_TURNSTILE_TOKEN')
    if not cloudflare_turnstile.check(token):
        return HttpResponseBadRequest('Cloudflare Turnstile token is missing or has expired!', status=511)
    payload = json.loads(request.body)
    id = payload.get("id")
    source = payload.get("source")
    try:
        DATA = web_scrap.source_control[source].get.scrap(id=id)
        return JsonResponse({"data": DATA})
    except Exception as e:
        return HttpResponseBadRequest(str(e), status=500)
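# get_cover: endpoint (60 requests/min per IP) that serves a comic cover image.
# Covers are cached under storage/covers for up to 5 hours; the cache is capped at 10 GiB
# and the oldest entries are evicted first. The file is streamed back in 8 KiB chunks.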
@ratelimit(key='ip', rate='60/m')
def get_cover(request,source,id,cover_id):
    token = request.META.get('HTTP_X_CLOUDFLARE_TURNSTILE_TOKEN')
    if not cloudflare_turnstile.check(token):
        return HttpResponseBadRequest('Cloudflare Turnstile token is missing or has expired!', status=511)
    file_path = ""
    file_name = ""
    chunk_size = 8192
    MAX_COVER_STORAGE_SIZE = 10 * 1024 * 1024 * 1024  # 10 GiB cap for the cover cache
    try:
        query_result = WebscrapeGetCoverCache.objects.filter(source=source, comic_id=id, cover_id=cover_id).first()
        if (
            query_result
            and os.path.exists(query_result.file_path)
            and query_result.datetime >= date_utils.utc_time().add(-5, 'hour').get()
        ):
            # Cache hit: the cover is on disk and less than 5 hours old.
            file_path = query_result.file_path
            file_name = os.path.basename(file_path)
        else:
            if not os.path.exists(os.path.join(STORAGE_DIR, "covers")):
                os.makedirs(os.path.join(STORAGE_DIR, "covers"))
            # Evict the oldest cached covers until the cache is back under the size limit.
            while True:
                storage_size = directory_info.GetDirectorySize(directory=os.path.join(STORAGE_DIR, "covers"), max_threads=5)
                if storage_size >= MAX_COVER_STORAGE_SIZE:
                    query_result = WebscrapeGetCoverCache.objects.order_by("datetime").first()
                    if query_result:
                        file_path = query_result.file_path
                        if os.path.exists(file_path):
                            os.remove(file_path)  # cached covers are single files, not directories
                        WebscrapeGetCoverCache.objects.filter(file_path=query_result.file_path).delete()
                    else:
                        # No cache records left to evict: clear and recreate the directory, then stop.
                        shutil.rmtree(os.path.join(STORAGE_DIR, "covers"))
                        os.makedirs(os.path.join(STORAGE_DIR, "covers"))
                        break
                else:
                    break
                print(storage_size)  # debug: cache size after each eviction
            DATA = web_scrap.source_control[source].get_cover.scrap(id=id, cover_id=cover_id)
            if not DATA:
                return HttpResponseBadRequest('Image not found!', status=404)
            file_path = os.path.join(STORAGE_DIR, "covers", f'{source}-{id}-{cover_id}.png')
            file_name = os.path.basename(file_path)
            with open(file_path, "wb") as f:
                f.write(DATA)
            WebscrapeGetCoverCache(
                file_path=file_path,
                source=source,
                comic_id=id,
                cover_id=cover_id,
            ).save()

        def file_iterator():
            # Stream the cover from disk in fixed-size chunks.
            with open(file_path, 'rb') as f:
                while chunk := f.read(chunk_size):
                    yield chunk

        response = StreamingHttpResponse(file_iterator())
        response['Content-Type'] = 'application/octet-stream'
        response['Content-Length'] = os.path.getsize(file_path)
        response['Content-Disposition'] = f'attachment; filename="{file_name}"'
        return response
    except Exception as e:
        return HttpResponseBadRequest(str(e), status=500)
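# Example client call (a sketch only -- the URL below is hypothetical, since the project's
# urls.py is not part of this file; the Turnstile header name matches the
# HTTP_X_CLOUDFLARE_TURNSTILE_TOKEN key read above):
#
#   import requests
#
#   resp = requests.post(
#       "http://localhost:8000/api/web_scrap/get_list",   # hypothetical route
#       headers={"X-Cloudflare-Turnstile-Token": "<token>"},
#       json={"search": {"text": "one piece"}, "page": 1, "source": "colamanga"},
#   )
#   print(resp.json()["data"])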