Commit 2178182 · Parent(s): a378726
tested working version of weight subconfig
Files changed: pythia-training-metrics.py (+12, -37)
pythia-training-metrics.py CHANGED

@@ -1,7 +1,6 @@
 import datasets
 import pickle
 
-
 _DESCRIPTION = """\
 Dataset for storing training metrics of pythia models
 """
@@ -12,10 +11,8 @@ class PythiaTrainingMetrics(datasets.GeneratorBasedBuilder):
         "70m",
         "160m",
         "410m",
-        "1b",
         "1.4b",
         "2.8b",
-        "6.9b"
     ]
 
     _GRADIENTS_DESCRIPTION = """\
@@ -56,36 +53,16 @@ class PythiaTrainingMetrics(datasets.GeneratorBasedBuilder):
             description=_WEIGHTS_DESCRIPTION,
             version="1.0.0",
         ),
-        datasets.BuilderConfig(
-            name="all",
-            description="All the metrics",
-            version="1.0.0",
-        )
-    ]
+    ]
 
     def _info(self):
         """
-
-
-        how do we do this if each feature is dependent on the model size?
+        NOTE: we might want to specify features, but since the featuers are different for each
+        model size it's annoying and kind of pointless since hf does it automatically
         """
 
-        features_dict = {
-            "checkpoint_step": datasets.Value('int32'),
-            "layer_name": datasets.Value('string'),
-        }
-
-        if self.config.name in ["activations", "weights"]:
-            features_dict['data'] = datasets.Sequence(datasets.Value('float32'))
-        elif self.config_name in ["gradients", "gradients_mini"]:
-            features_dict['gradient_step'] = datasets.Value('int32')
-            features_dict['gradient'] = datasets.Sequence(datasets.Value('float32'))
-
-        features = datasets.Features(features_dict)
-
         return datasets.DatasetInfo(
             description=_DESCRIPTION,
-            features=features,
         )
 
 
@@ -112,12 +89,12 @@ class PythiaTrainingMetrics(datasets.GeneratorBasedBuilder):
 
                 if self.config.name == "activations":
                     model_size_to_fp[model_size].append(f"{directory_path}/checkpoint_activations.pickle")
-                elif self.…
+                elif self.config.name == "weights":
                     model_size_to_fp[model_size].append(f"{directory_path}/checkpoint_weights.pickle")
-                elif self.…
+                elif self.config.name == "gradients":
                     for gradient_step in get_gradient_step(checkpoint_step):
                         model_size_to_fp[model_size].append(f"{directory_path}/checkpoint_gradients_{gradient_step}.pickle")
-                elif self.…
+                elif self.config.name == "gradients_mini":
                     for gradient_step in get_gradient_step(checkpoint_step)[:2]:
                         model_size_to_fp[model_size].append(f"{directory_path}/checkpoint_gradients_mini_{gradient_step}.pickle")
                 else:
@@ -134,29 +111,27 @@ class PythiaTrainingMetrics(datasets.GeneratorBasedBuilder):
             ) for model_size_name, downloaded_fps in downloaded_files.items()
         ]
 
-    def _generate_examples(self, filepaths…
+    def _generate_examples(self, filepaths):
 
         # the filepaths should be a list of filepaths
         if isinstance(filepaths, str):
            filepaths = [filepaths]
-
+
        global_idx = 0 # the unique identifier for the example
 
        for filepath in filepaths:
-            with open(filepath,…
+            with open(filepath, 'rb') as f:
                data = pickle.load(f)
 
            # extract checkpoint step from the filepath
-            checkpoint_step = int(filepath.split("/")[…
+            checkpoint_step = int(filepath.split("/")[-2].split("_")[-1])
 
            if self.config.name in ["activations", "weights"]:
                for layer_name, layer_data in data.items():
-                    yield global_idx, {"checkpoint_step": checkpoint_step, "layer_name": layer_name, "data":…
+                    yield global_idx, {"checkpoint_step": checkpoint_step, "layer_name": layer_name, "data": layer_data}
                    global_idx += 1
            elif self.config.name in ["gradients", "gradients_mini"]:
-
                gradient_step = int(filepath.split('/')[-1].split("_")[-1].split(".")[0])
-
                for layer_name, layer_data in data.items():
-                    yield global_idx, {"checkpoint_step": checkpoint_step, "layer_name": layer_name, "gradient_step": gradient_step, "…
+                    yield global_idx, {"checkpoint_step": checkpoint_step, "layer_name": layer_name, "gradient_step": gradient_step, "data": layer_data}
                    global_idx += 1
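A note on the removed _info block: the deleted code checked self.config_name where the line above correctly uses self.config.name, and it declared a 'gradient' feature while _generate_examples yields the values under 'data', so the explicit feature spec was both buggy and out of sync with the generator. Deleting it and letting the library infer features (as the new docstring says) is the simpler fix. For reference, a corrected version of what that block was attempting might look like the sketch below; this is an illustration only, not part of the commit, and it reuses the deleted block's field names.

    def _info(self):
        # Sketch only: per-config feature declarations, with the removed
        # block's self.config_name typo fixed to self.config.name and the
        # gradient values stored under "data" to match what
        # _generate_examples actually yields.
        features_dict = {
            "checkpoint_step": datasets.Value('int32'),
            "layer_name": datasets.Value('string'),
            "data": datasets.Sequence(datasets.Value('float32')),
        }
        if self.config.name in ["gradients", "gradients_mini"]:
            features_dict['gradient_step'] = datasets.Value('int32')
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(features_dict),
        )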
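The two index-juggling lines added to _generate_examples encode a path-layout assumption worth spelling out: the checkpoint step is the numeric suffix of the parent directory name, and the gradient step is the numeric suffix of the file name. A worked example with a hypothetical path (the real directory_path layout is not shown in this diff):

# Hypothetical path, consistent with the f-strings in _split_generators;
# the "checkpoint_3000" directory name is an assumption, not from the repo.
fp = "pythia-70m/checkpoint_3000/checkpoint_gradients_2.pickle"

# parent directory "checkpoint_3000" -> step 3000
checkpoint_step = int(fp.split("/")[-2].split("_")[-1])
# file name "checkpoint_gradients_2.pickle" -> gradient step 2
gradient_step = int(fp.split('/')[-1].split("_")[-1].split(".")[0])
print(checkpoint_step, gradient_step)  # 3000 2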
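Finally, a quick load test in the spirit of the commit message. Everything here is an assumption flagged as such: the script is saved locally as pythia-training-metrics.py, a datasets version that still executes loader scripts is installed, and the split name "70m" is a guess based on _split_generators creating one split per model size.

# Smoke test for the now-working "weights" subconfig -- a sketch, not part
# of the commit.
from datasets import load_dataset

# Valid config names after this commit: "activations", "weights",
# "gradients", "gradients_mini" (the "all" config was removed above).
ds = load_dataset("./pythia-training-metrics.py", "weights", split="70m")

row = ds[0]
# Each row pairs a training step and layer name with that layer's
# flattened float32 weights.
print(row["checkpoint_step"], row["layer_name"], len(row["data"]))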