import sys
import time
import csv
import pickle
import hashlib


def processing(datafile):
    """Clean and anonymize one raw request-log trace.

    Reads the CSV trace `datafile`, normalizes the TTL and age fields, hashes
    host-related identifiers, derives a few path/query features, and writes
    the result to `<datafile>.clean`.
    """
    ifile = open(datafile, errors="ignore")
    reader = csv.reader(ifile)
    ofile = open(datafile + ".clean", "w")

    # (hostname, content_type) -> typical TTL, used to replace implausible TTLs
    with open("ttl_dict.pickle", "rb") as f:
        ttl_dict = pickle.load(f)

    n = 0
    n_no_ttl, n_use_ttl_dict = 0, 0
    no_age = 0
    n_time_change = 0
    last_ts = 0

    for row in reader:
        # Fields: ts country colo obj obj2 sz content_type tiered_hit ttl age
        #         cstatus method zone_id edgehost referhost hostname path query_string
        # Example rows:
        # 1628962150 238 8881329091665286219 1031315 bin 0 2666005146 1628962150 hit GET 438702142 ark-us-static-online.yo-star.com /assetbundle/official/Android/assets/21-07-28-08-08-11-1b23f7/avg_images_avg_6_17.dat
        # 1628962464 238 11714083853013678418 1403 svg 0 2666004832 758240 miss GET 444690466 www.envive.com /Assets/Envive-DTC/img/img-1200x200-mobile-back.svg
        n += 1
        try:
            (ts, country, colo, obj, obj2, sz, content_type, tiered_hit,
             ttl, age, cstatus, method, zone_id, edgehost, referhost,
             hostname, path, query_string) = row
        except Exception as e:
            print(e, row)
            continue

        ts, ttl, age = int(ts), int(ttl), int(age)
        # count out-of-order timestamps (the trace is not strictly sorted)
        if ts < last_ts:
            n_time_change += 1
        last_ts = ts

        # TTLs beyond one year are implausible; fall back to the typical TTL
        # for this (hostname, content_type) pair if we have one
        if ttl > 86400 * 365 and (hostname, content_type) in ttl_dict:
            ttl = ttl_dict[(hostname, content_type)]
            n_use_ttl_dict += 1
        # a TTL or age above 30 years means the field is effectively missing
        if ttl > 86400 * 365 * 30:
            n_no_ttl += 1
            ttl = -1
        if age > 86400 * 365 * 30:
            no_age += 1
            age = -1

        # anonymize host-related identifiers by hashing into a 30-bit space
        hostname = str(int(hashlib.md5(hostname.encode()).hexdigest(), 16) % (2**30))
        edgehost = str(int(hashlib.md5(edgehost.encode()).hexdigest(), 16) % (2**30))
        referhost = str(int(hashlib.md5(referhost.encode()).hexdigest(), 16) % (2**30))
        zone_id = str(int(hashlib.md5(zone_id.encode()).hexdigest(), 16) % (2**30))

        # derive coarse path/query features instead of keeping the raw URL
        has_query_string = "1" if query_string.strip() else "0"
        n_param = str(query_string.count("&") + 1)
        n_level = str(path.count("/") - 1)
        extension = path.split(".")[-1]
        if "-" in extension or "/" in extension or "_" in extension or len(extension) > 10:
            extension = ""

        ofile.write(",".join([
            str(ts), country, colo, obj, obj2, sz, content_type, extension,
            n_level, str(ttl), str(age), cstatus, method, zone_id, hostname,
            edgehost, referhost, has_query_string, n_param,
        ]) + "\n")

        if n % 20000000 == 0:
            print("{:.0f} {} req, {} missingTTL, {} replaceTTL, {} noAge, {} timeDiff".format(
                time.time(), n // 10**6, n_no_ttl // 10**6, n_use_ttl_dict // 10**6,
                no_age // 10**6, n_time_change))

    ifile.close()
    ofile.close()


if __name__ == "__main__":
    # processing("colo_0814_17_0")
    processing(sys.argv[1])
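

# ---------------------------------------------------------------------------
# Hypothetical helper, not part of the original pipeline: a minimal sketch of
# how "ttl_dict.pickle" could be produced, assuming it maps
# (hostname, content_type) -> a representative TTL observed for that pair.
# The input format and the choice of the median below are assumptions, not
# the confirmed procedure used to generate the pickle this script loads.
# ---------------------------------------------------------------------------
def build_ttl_dict_sketch(datafile, out_path="ttl_dict.pickle"):
    import statistics
    from collections import defaultdict

    ttls = defaultdict(list)
    with open(datafile, errors="ignore") as f:
        for row in csv.reader(f):
            try:
                (ts, country, colo, obj, obj2, sz, content_type, tiered_hit,
                 ttl, age, cstatus, method, zone_id, edgehost, referhost,
                 hostname, path, query_string) = row
            except ValueError:
                continue
            ttl = int(ttl)
            # keep only plausible TTLs (at most one year) as evidence
            if 0 < ttl <= 86400 * 365:
                ttls[(hostname, content_type)].append(ttl)

    # one representative TTL per (hostname, content_type): the median
    ttl_dict = {key: int(statistics.median(vals)) for key, vals in ttls.items()}
    with open(out_path, "wb") as f:
        pickle.dump(ttl_dict, f)
    return ttl_dict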