import os, sys
import re
import json
import base64
import argparse
import pprint
from collections import defaultdict


def gen_op(input_path, db, op):
    """Generate a redis-cli input file that runs `op` (e.g. ttl/llen) on every key
    extracted from `input_path`."""
    keys = _extract_keys(input_path)
    with open(input_path.replace("keys", op) + ".input", "w") as ofile:
        ofile.write("select {}\r\n".format(db))
        for key in keys:
            ofile.write('{} "{}"\r\n'.format(op, key))


def _extract_keys(input_path):
    """Parse key names out of redis-cli SCAN output, skipping status, cursor and
    timing lines."""
    keys = []
    # first line of a SCAN reply:  2)   1) "key"
    regex_first_line = re.compile(r"2\)\s+1\) \"(?P<key>.+)\"")
    # subsequent reply lines:      N) "key"
    regex = re.compile(r"\s*\d+\) \"(?P<key>.+)\"\n")
    with open(input_path) as ifile:
        for line in ifile:
            if line.startswith("OK") or line.startswith("1) \"0\""):
                # print("skip {}".format(line))
                continue
            if line[0] == "(" and "s)" in line:
                # skip timing lines such as (0.50s)
                continue
            try:
                m = regex.match(line)
                if m is None:
                    m = regex_first_line.match(line)
                keys.append(m.group("key"))
            except Exception as e:
                print(e, line)
    return keys


def _extract_ttl(input_path):
    """Parse integer TTL replies, one per line, skipping "OK" lines."""
    ttls = []
    with open(input_path) as ifile:
        for line in ifile:
            if line.startswith("OK"):
                continue
            ttls.append(int(line[1:]))
    return ttls


def _extract_klen(input_path):
    """Parse integer key-length replies, one per line, skipping "OK" lines."""
    klens = []
    with open(input_path) as ifile:
        for line in ifile:
            if line.startswith("OK"):
                continue
            klens.append(int(line[1:]))
    return klens


def convert_state(input_path):
    """Combine the <input_path>.keys, .keylen and .ttl files into a single JSON
    state file mapping key -> (length, ttl)."""
    state_dict = {}
    keys = _extract_keys("{}.keys".format(input_path))
    klen = _extract_klen("{}.keylen".format(input_path))
    ttls = _extract_ttl("{}.ttl".format(input_path))
    if len(keys) != len(klen) or len(keys) != len(ttls):
        print("error {} {} {} {}".format(input_path, len(keys), len(klen), len(ttls)))
        print("{} {}".format(keys[-1], klen[-1]))
        # pprint.pprint(klen)
        sys.exit(1)
    for i in range(len(keys)):
        # normalize escaped quotes so keys match the trace format
        keys[i] = keys[i].replace('\\\\"', 'a"').replace('\\"', 'a')
        # state_dict[hash(keys[i])] = (klen[i], ttls[i])
        state_dict[keys[i]] = (klen[i], ttls[i])
    with open("{}.state.json".format(input_path), "w") as ofile:
        json.dump(state_dict, ofile)

    # sanity check kept for reference: count how many trace keys appear in the state
    # n_in, n_no = 0, 0
    # not_in = defaultdict(int)
    # trace_path = "fanout.smf1-cba-21-sr1.prod.twitter.com.31654.log.1"
    # trace_path = "nonfanout.smf1-bos-12-sr1.prod.twitter.com.31772.log.1"
    # trace_path = "fanout.smf1-hax-32-sr1.prod.twitter.com.31881.log.1"
    # with open(trace_path) as ifile:
    #     for line in ifile:
    #         line_split = line.split("\t")
    #         key = line_split[3].strip()
    #         if key in state_dict:
    #             n_in += 1
    #         else:
    #             print(line)
    #             print(key)
    #             if n_no > 20:
    #                 1/0
    #             not_in[key] += 1
    #             n_no += 1
    # pprint.pprint(not_in)
    # print("{}/{}".format(n_in, n_in + n_no))


def load_state(trace_path, state_path):
    """Report how many keys in the trace at `trace_path` are present in the JSON
    state file at `state_path`."""
    n_in, n_no = 0, 0
    with open(state_path) as ifile:
        state_dict = json.load(ifile)
    with open(trace_path) as ifile:
        for line in ifile:
            line_split = line.split("\t")
            key = line_split[3]
            if key in state_dict:
                n_in += 1
            else:
                n_no += 1
    print("{}/{}".format(n_in, n_in + n_no))


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="generate some data")
    parser.add_argument('--func', dest="func", type=str, required=True,
                        help="gen_op/convert_state/load_state")
    parser.add_argument('--path', dest="path", type=str, required=True,
                        help="path to input")
    parser.add_argument('--db', dest="db", type=int, default=-1,
                        help="db used in gen_op")
    parser.add_argument('--op', dest='op', default="",
                        help="op used in gen_op")
    parser.add_argument('--path2', dest="path2", type=str, default="",
                        help="path to the state JSON (used by load_state)")
    args = parser.parse_args()
    if args.func == "gen_op":
        assert args.db != -1
        assert args.op in ("ttl", "llen", "card", "test")
        gen_op(args.path, args.db, args.op)
    elif args.func == "convert_state":
        convert_state(args.path)
    elif args.func == "load_state":
        load_state(args.path, args.path2)
    else:
        raise RuntimeError("unknown func {}".format(args.func))
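
# A minimal usage sketch; the script and file names below are hypothetical.
# gen_op expects a file whose name contains "keys"; convert_state expects the
# prefix of <path>.keys, <path>.keylen and <path>.ttl files:
#   python gen_data.py --func gen_op --path dump.keys --db 0 --op ttl
#   python gen_data.py --func convert_state --path dump
#   python gen_data.py --func load_state --path trace.log --path2 dump.state.json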