ppo-Huggy/run_logs/training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 199848,
"file_path": "results/Huggy42/Huggy/Huggy-199848.onnx",
"reward": 3.362764217351612,
"creation_time": 1740379280.72234,
"auxillary_file_paths": [
"results/Huggy42/Huggy/Huggy-199848.pt"
]
},
{
"steps": 399917,
"file_path": "results/Huggy42/Huggy/Huggy-399917.onnx",
"reward": 3.6186559218626755,
"creation_time": 1740379532.3216622,
"auxillary_file_paths": [
"results/Huggy42/Huggy/Huggy-399917.pt"
]
},
{
"steps": 599944,
"file_path": "results/Huggy42/Huggy/Huggy-599944.onnx",
"reward": 4.101396453380585,
"creation_time": 1740379783.6343012,
"auxillary_file_paths": [
"results/Huggy42/Huggy/Huggy-599944.pt"
]
},
{
"steps": 799358,
"file_path": "results/Huggy42/Huggy/Huggy-799358.onnx",
"reward": 4.075060218911279,
"creation_time": 1740380030.5201607,
"auxillary_file_paths": [
"results/Huggy42/Huggy/Huggy-799358.pt"
]
},
{
"steps": 999967,
"file_path": "results/Huggy42/Huggy/Huggy-999967.onnx",
"reward": 3.766227330598566,
"creation_time": 1740380280.4003167,
"auxillary_file_paths": [
"results/Huggy42/Huggy/Huggy-999967.pt"
]
},
{
"steps": 1199899,
"file_path": "results/Huggy42/Huggy/Huggy-1199899.onnx",
"reward": 4.202163961198595,
"creation_time": 1740380530.5008109,
"auxillary_file_paths": [
"results/Huggy42/Huggy/Huggy-1199899.pt"
]
},
{
"steps": 1399957,
"file_path": "results/Huggy42/Huggy/Huggy-1399957.onnx",
"reward": 6.278035640716553,
"creation_time": 1740380783.3050318,
"auxillary_file_paths": [
"results/Huggy42/Huggy/Huggy-1399957.pt"
]
},
{
"steps": 1599817,
"file_path": "results/Huggy42/Huggy/Huggy-1599817.onnx",
"reward": 3.9447855250809782,
"creation_time": 1740381027.3605638,
"auxillary_file_paths": [
"results/Huggy42/Huggy/Huggy-1599817.pt"
]
},
{
"steps": 1799895,
"file_path": "results/Huggy42/Huggy/Huggy-1799895.onnx",
"reward": 3.8950829281095873,
"creation_time": 1740381280.8449183,
"auxillary_file_paths": [
"results/Huggy42/Huggy/Huggy-1799895.pt"
]
},
{
"steps": 1999732,
"file_path": "results/Huggy42/Huggy/Huggy-1999732.onnx",
"reward": 3.601656436920166,
"creation_time": 1740381537.54293,
"auxillary_file_paths": [
"results/Huggy42/Huggy/Huggy-1999732.pt"
]
},
{
"steps": 2000482,
"file_path": "results/Huggy42/Huggy/Huggy-2000482.onnx",
"reward": 3.4511725378036497,
"creation_time": 1740381537.6994557,
"auxillary_file_paths": [
"results/Huggy42/Huggy/Huggy-2000482.pt"
]
}
],
"final_checkpoint": {
"steps": 2000482,
"file_path": "results/Huggy42/Huggy.onnx",
"reward": 3.4511725378036497,
"creation_time": 1740381537.6994557,
"auxillary_file_paths": [
"results/Huggy42/Huggy/Huggy-2000482.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}
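
The JSON above is the ML-Agents training_status.json written during the Huggy PPO run: one entry per saved checkpoint, each recording the step count, the exported .onnx path, the mean reward at save time, the creation timestamp (Unix epoch seconds), and the matching .pt auxiliary file, followed by a final_checkpoint entry and the tool versions used. A minimal Python sketch for inspecting such a file, assuming it has been downloaded to run_logs/training_status.json (that local path is an assumption):

import json
from datetime import datetime, timezone

# Path is an assumption; point it at wherever the run logs were downloaded.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

huggy = status["Huggy"]

# One line per checkpoint: step count, mean reward, and save time.
for ckpt in huggy["checkpoints"]:
    saved = datetime.fromtimestamp(ckpt["creation_time"], tz=timezone.utc)
    print(f'{ckpt["steps"]:>8} steps  reward {ckpt["reward"]:.3f}  saved {saved:%Y-%m-%d %H:%M:%S} UTC')

final = huggy["final_checkpoint"]
print(f'final: {final["file_path"]} at {final["steps"]} steps, reward {final["reward"]:.3f}')

Run against the log above, this would list the eleven checkpoints (reward climbing from roughly 3.36 at 199,848 steps to about 3.45 at the final 2,000,482-step export) and the final results/Huggy42/Huggy.onnx model.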