Anisha Bhatnagar committed on
Commit 8db24a7 · 1 Parent(s): c0e59d3

updated file open mode in cache writing

Files changed (1)
  1. utils/llm_feat_utils.py +4 -2
utils/llm_feat_utils.py CHANGED
@@ -21,7 +21,7 @@ def _feat_hash(feature: str, text: str) -> str:
     blob = json.dumps({
         "version": CACHE_VERSION,
         "text": text,
-        "features": sorted(feature)
+        "features": feature
     }, sort_keys=True).encode()
     return hashlib.md5(blob).hexdigest()
 
@@ -105,8 +105,10 @@ def generate_feature_spans_cached(client, text: str, features: list[str], role:
 
         h = _feat_hash(feat, text)
         if h in cache:
+            print(f"Found feature: {feat}")
             result[feat] = cache[h]["spans"]
         else:
+            print(f"Missing feature: {feat}")
             missing_feats.append(feat)
 
     if missing_feats:
@@ -123,7 +125,7 @@
             result[feat] = spans
 
     # 5) write back the combined cache
-    with open(cache_path, "w") as f:
+    with open(cache_path, "a") as f:
         json.dump(cache, f, indent=2)
     return result
 
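For reference, a minimal self-contained sketch of the cache-key helper as it reads after this commit. The value of CACHE_VERSION is not visible in the diff, so the constant below is a placeholder assumption, and the real one is presumably defined elsewhere in utils/llm_feat_utils.py:

import hashlib
import json

CACHE_VERSION = "1"  # placeholder assumption; not shown in the diff

def _feat_hash(feature: str, text: str) -> str:
    # Key the cache on the cache version, the input text, and the raw feature string.
    # Before this commit the key used sorted(feature), which sorts the characters
    # of the feature string rather than a list of features.
    blob = json.dumps({
        "version": CACHE_VERSION,
        "text": text,
        "features": feature
    }, sort_keys=True).encode()
    return hashlib.md5(blob).hexdigest()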
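And a rough sketch of the caching flow that the remaining hunks sit in. Only the lines shown in the diff are taken from the file; the cache loading step, the default cache_path, the role parameter's type, and the fetch_spans helper that queries the LLM for the missing features are assumptions added here for illustration:

import json
import os

def generate_feature_spans_cached(client, text: str, features: list[str], role: str,
                                  cache_path: str = "feat_cache.json") -> dict:
    # Assumed for illustration: load any existing cache from disk (not shown in the diff).
    cache = {}
    if os.path.exists(cache_path):
        with open(cache_path) as f:
            cache = json.load(f)

    result, missing_feats = {}, []
    for feat in features:
        h = _feat_hash(feat, text)
        if h in cache:
            print(f"Found feature: {feat}")
            result[feat] = cache[h]["spans"]
        else:
            print(f"Missing feature: {feat}")
            missing_feats.append(feat)

    if missing_feats:
        # Hypothetical helper: ask the LLM client for spans of the missing features,
        # then store them under their hashes before writing the cache back.
        for feat, spans in fetch_spans(client, text, missing_feats, role).items():
            cache[_feat_hash(feat, text)] = {"spans": spans}
            result[feat] = spans

    # 5) write back the combined cache (opened in append mode as of this commit)
    with open(cache_path, "a") as f:
        json.dump(cache, f, indent=2)
    return result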