{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199882,
                "file_path": "results/Huggy2/Huggy/Huggy-199882.onnx",
                "reward": 3.219921289928376,
                "creation_time": 1740312831.722542,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-199882.pt"
                ]
            },
            {
                "steps": 399976,
                "file_path": "results/Huggy2/Huggy/Huggy-399976.onnx",
                "reward": 3.848973134816703,
                "creation_time": 1740313085.8937492,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-399976.pt"
                ]
            },
            {
                "steps": 599963,
                "file_path": "results/Huggy2/Huggy/Huggy-599963.onnx",
                "reward": 3.4898789687590166,
                "creation_time": 1740313338.8565319,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-599963.pt"
                ]
            },
            {
                "steps": 799966,
                "file_path": "results/Huggy2/Huggy/Huggy-799966.onnx",
                "reward": 3.789694464142947,
                "creation_time": 1740313592.566148,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-799966.pt"
                ]
            },
            {
                "steps": 999980,
                "file_path": "results/Huggy2/Huggy/Huggy-999980.onnx",
                "reward": 3.8811296725618667,
                "creation_time": 1740313852.5240445,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-999980.pt"
                ]
            },
            {
                "steps": 1199913,
                "file_path": "results/Huggy2/Huggy/Huggy-1199913.onnx",
                "reward": 3.7745438290805353,
                "creation_time": 1740314111.0223353,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1199913.pt"
                ]
            },
            {
                "steps": 1399894,
                "file_path": "results/Huggy2/Huggy/Huggy-1399894.onnx",
                "reward": 3.6931326627731322,
                "creation_time": 1740314371.7923,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1399894.pt"
                ]
            },
            {
                "steps": 1599996,
                "file_path": "results/Huggy2/Huggy/Huggy-1599996.onnx",
                "reward": 4.009005762747864,
                "creation_time": 1740314626.8212306,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1599996.pt"
                ]
            },
            {
                "steps": 1799919,
                "file_path": "results/Huggy2/Huggy/Huggy-1799919.onnx",
                "reward": 4.0005736375848455,
                "creation_time": 1740314884.0451052,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1799919.pt"
                ]
            },
            {
                "steps": 1999965,
                "file_path": "results/Huggy2/Huggy/Huggy-1999965.onnx",
                "reward": 3.604427305551676,
                "creation_time": 1740315142.35268,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1999965.pt"
                ]
            },
            {
                "steps": 2000002,
                "file_path": "results/Huggy2/Huggy/Huggy-2000002.onnx",
                "reward": 3.593333703167034,
                "creation_time": 1740315142.4742968,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-2000002.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000002,
            "file_path": "results/Huggy2/Huggy.onnx",
            "reward": 3.593333703167034,
            "creation_time": 1740315142.4742968,
            "auxillary_file_paths": [
                "results/Huggy2/Huggy/Huggy-2000002.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "1.2.0.dev0",
        "torch_version": "2.6.0+cu124"
    }
}