ppo-Huggy / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199826,
                "file_path": "results/Huggy2/Huggy/Huggy-199826.onnx",
                "reward": 3.7012878638325315,
                "creation_time": 1740208294.5129063,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-199826.pt"
                ]
            },
            {
                "steps": 399981,
                "file_path": "results/Huggy2/Huggy/Huggy-399981.onnx",
                "reward": 3.532284898813381,
                "creation_time": 1740208542.3584855,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-399981.pt"
                ]
            },
            {
                "steps": 599998,
                "file_path": "results/Huggy2/Huggy/Huggy-599998.onnx",
                "reward": 4.134400027769583,
                "creation_time": 1740208789.5453637,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-599998.pt"
                ]
            },
            {
                "steps": 799928,
                "file_path": "results/Huggy2/Huggy/Huggy-799928.onnx",
                "reward": 3.6995640668569436,
                "creation_time": 1740209037.7369356,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-799928.pt"
                ]
            },
            {
                "steps": 999894,
                "file_path": "results/Huggy2/Huggy/Huggy-999894.onnx",
                "reward": 3.9362998750151657,
                "creation_time": 1740209294.3313997,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-999894.pt"
                ]
            },
            {
                "steps": 1199927,
                "file_path": "results/Huggy2/Huggy/Huggy-1199927.onnx",
                "reward": 4.041434555722956,
                "creation_time": 1740209552.5495994,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1199927.pt"
                ]
            },
            {
                "steps": 1399979,
                "file_path": "results/Huggy2/Huggy/Huggy-1399979.onnx",
                "reward": null,
                "creation_time": 1740209805.5078905,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1399979.pt"
                ]
            },
            {
                "steps": 1599387,
                "file_path": "results/Huggy2/Huggy/Huggy-1599387.onnx",
                "reward": 3.7482278445187736,
                "creation_time": 1740210053.2875552,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1599387.pt"
                ]
            },
            {
                "steps": 1799990,
                "file_path": "results/Huggy2/Huggy/Huggy-1799990.onnx",
                "reward": 3.6029646655575176,
                "creation_time": 1740210301.639756,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1799990.pt"
                ]
            },
            {
                "steps": 1999970,
                "file_path": "results/Huggy2/Huggy/Huggy-1999970.onnx",
                "reward": 3.7265276653426036,
                "creation_time": 1740210547.8887694,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1999970.pt"
                ]
            },
            {
                "steps": 2000036,
                "file_path": "results/Huggy2/Huggy/Huggy-2000036.onnx",
                "reward": 3.7186062210484554,
                "creation_time": 1740210548.0798237,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-2000036.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000036,
            "file_path": "results/Huggy2/Huggy.onnx",
            "reward": 3.7186062210484554,
            "creation_time": 1740210548.0798237,
            "auxillary_file_paths": [
                "results/Huggy2/Huggy/Huggy-2000036.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "1.2.0.dev0",
        "torch_version": "2.6.0+cu124"
    }
}
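
This file is the checkpoint log that ML-Agents writes during training: one entry per saved checkpoint with its step count, exported ONNX path, mean reward at save time (null when no reward was recorded), creation timestamp, and companion .pt file. Below is a minimal Python sketch of how one might inspect it; the file path `run_logs/training_status.json` and the behavior name `"Huggy"` come from this file, while everything else (variable names, output format) is illustrative, not part of any ML-Agents API.

```python
import json

# Load the training status log (path assumed; adjust to where your
# run_logs directory actually lives).
with open("run_logs/training_status.json") as f:
    status = json.load(f)

behavior = status["Huggy"]

# Walk the checkpoint history. "reward" can be null in the JSON,
# which json.load turns into None.
for ckpt in behavior["checkpoints"]:
    reward = ckpt["reward"]
    reward_str = f"{reward:.3f}" if reward is not None else "n/a"
    print(f"steps={ckpt['steps']:>8}  reward={reward_str}  file={ckpt['file_path']}")

# The final checkpoint points at the top-level exported ONNX model.
final = behavior["final_checkpoint"]
print(f"final: steps={final['steps']}, reward={final['reward']:.3f}, onnx={final['file_path']}")
```

Reading the log this way makes the run's trajectory easy to see: reward climbs from about 3.70 at 200k steps to a peak of about 4.13 at 600k, then settles around 3.6 to 4.0, finishing at roughly 3.72 when training stops just past the 2M-step mark.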