{
"Huggy": {
"checkpoints": [
{
"steps": 199995,
"file_path": "results/Huggy2/Huggy/Huggy-199995.onnx",
"reward": 3.2927988208830357,
"creation_time": 1708374292.8591747,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199995.pt"
]
},
{
"steps": 399984,
"file_path": "results/Huggy2/Huggy/Huggy-399984.onnx",
"reward": 3.8956145284504724,
"creation_time": 1708374534.0587125,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399984.pt"
]
},
{
"steps": 599962,
"file_path": "results/Huggy2/Huggy/Huggy-599962.onnx",
"reward": 3.74066232641538,
"creation_time": 1708374781.0058105,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599962.pt"
]
},
{
"steps": 799998,
"file_path": "results/Huggy2/Huggy/Huggy-799998.onnx",
"reward": 3.812559921884797,
"creation_time": 1708375028.8254104,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799998.pt"
]
},
{
"steps": 999988,
"file_path": "results/Huggy2/Huggy/Huggy-999988.onnx",
"reward": 4.093284570078813,
"creation_time": 1708375296.4910598,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999988.pt"
]
},
{
"steps": 1199824,
"file_path": "results/Huggy2/Huggy/Huggy-1199824.onnx",
"reward": 4.0464977540221865,
"creation_time": 1708375577.917234,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199824.pt"
]
},
{
"steps": 1399969,
"file_path": "results/Huggy2/Huggy/Huggy-1399969.onnx",
"reward": 4.069404104492695,
"creation_time": 1708375854.3617449,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399969.pt"
]
},
{
"steps": 1599948,
"file_path": "results/Huggy2/Huggy/Huggy-1599948.onnx",
"reward": 3.78865222964968,
"creation_time": 1708376126.0820131,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599948.pt"
]
},
{
"steps": 1799988,
"file_path": "results/Huggy2/Huggy/Huggy-1799988.onnx",
"reward": 3.8584734288354716,
"creation_time": 1708376380.9295137,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799988.pt"
]
},
{
"steps": 1999592,
"file_path": "results/Huggy2/Huggy/Huggy-1999592.onnx",
"reward": 3.660478052638826,
"creation_time": 1708376635.5177288,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999592.pt"
]
},
{
"steps": 2000342,
"file_path": "results/Huggy2/Huggy/Huggy-2000342.onnx",
"reward": 3.491485584613889,
"creation_time": 1708376635.6731026,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000342.pt"
]
}
],
"final_checkpoint": {
"steps": 2000342,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.491485584613889,
"creation_time": 1708376635.6731026,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000342.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.0+cu121"
}
}