{
"Huggy": {
"checkpoints": [
{
"steps": 199908,
"file_path": "results/Huggy2/Huggy/Huggy-199908.onnx",
"reward": 3.409526271606559,
"creation_time": 1709211804.3455973,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199908.pt"
]
},
{
"steps": 399983,
"file_path": "results/Huggy2/Huggy/Huggy-399983.onnx",
"reward": 3.8304943469437687,
"creation_time": 1709212034.7884715,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399983.pt"
]
},
{
"steps": 599861,
"file_path": "results/Huggy2/Huggy/Huggy-599861.onnx",
"reward": 3.8438420626852245,
"creation_time": 1709212265.6047413,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599861.pt"
]
},
{
"steps": 799958,
"file_path": "results/Huggy2/Huggy/Huggy-799958.onnx",
"reward": 3.8096066161547557,
"creation_time": 1709212493.0045364,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799958.pt"
]
},
{
"steps": 999974,
"file_path": "results/Huggy2/Huggy/Huggy-999974.onnx",
"reward": 3.8008825644089357,
"creation_time": 1709212726.005507,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999974.pt"
]
},
{
"steps": 1199912,
"file_path": "results/Huggy2/Huggy/Huggy-1199912.onnx",
"reward": 3.7768067448911533,
"creation_time": 1709212960.4759161,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199912.pt"
]
},
{
"steps": 1399875,
"file_path": "results/Huggy2/Huggy/Huggy-1399875.onnx",
"reward": 4.316976005380804,
"creation_time": 1709213193.8573856,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399875.pt"
]
},
{
"steps": 1599990,
"file_path": "results/Huggy2/Huggy/Huggy-1599990.onnx",
"reward": 3.7032399810090357,
"creation_time": 1709213423.7402372,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599990.pt"
]
},
{
"steps": 1799261,
"file_path": "results/Huggy2/Huggy/Huggy-1799261.onnx",
"reward": 3.8381670298783677,
"creation_time": 1709213659.8213544,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799261.pt"
]
},
{
"steps": 1999927,
"file_path": "results/Huggy2/Huggy/Huggy-1999927.onnx",
"reward": 3.964328599969546,
"creation_time": 1709213896.7406154,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999927.pt"
]
},
{
"steps": 2000045,
"file_path": "results/Huggy2/Huggy/Huggy-2000045.onnx",
"reward": 3.996802198105171,
"creation_time": 1709213896.8627415,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000045.pt"
]
}
],
"final_checkpoint": {
"steps": 2000045,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.996802198105171,
"creation_time": 1709213896.8627415,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000045.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.1+cu121"
}
}