ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199903,
                "file_path": "results/Huggy2/Huggy/Huggy-199903.onnx",
                "reward": 2.160845018568493,
                "creation_time": 1739806961.2770543,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-199903.pt"
                ]
            },
            {
                "steps": 399985,
                "file_path": "results/Huggy2/Huggy/Huggy-399985.onnx",
                "reward": 4.513391637802124,
                "creation_time": 1739807220.1167805,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-399985.pt"
                ]
            },
            {
                "steps": 599850,
                "file_path": "results/Huggy2/Huggy/Huggy-599850.onnx",
                "reward": 3.3021232954661053,
                "creation_time": 1739807456.0930681,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-599850.pt"
                ]
            },
            {
                "steps": 799853,
                "file_path": "results/Huggy2/Huggy/Huggy-799853.onnx",
                "reward": 3.1651441073808515,
                "creation_time": 1739807703.9732208,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-799853.pt"
                ]
            },
            {
                "steps": 999991,
                "file_path": "results/Huggy2/Huggy/Huggy-999991.onnx",
                "reward": 3.595023920902839,
                "creation_time": 1739807957.4252005,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-999991.pt"
                ]
            },
            {
                "steps": 1199981,
                "file_path": "results/Huggy2/Huggy/Huggy-1199981.onnx",
                "reward": 3.9502364240843675,
                "creation_time": 1739808208.05197,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1199981.pt"
                ]
            },
            {
                "steps": 1399895,
                "file_path": "results/Huggy2/Huggy/Huggy-1399895.onnx",
                "reward": 4.128851723797778,
                "creation_time": 1739808463.9715292,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1399895.pt"
                ]
            },
            {
                "steps": 1599895,
                "file_path": "results/Huggy2/Huggy/Huggy-1599895.onnx",
                "reward": 3.8516277432441712,
                "creation_time": 1739808736.8155146,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1599895.pt"
                ]
            },
            {
                "steps": 1799982,
                "file_path": "results/Huggy2/Huggy/Huggy-1799982.onnx",
                "reward": 3.703496104784501,
                "creation_time": 1739809008.8518417,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1799982.pt"
                ]
            },
            {
                "steps": 1999992,
                "file_path": "results/Huggy2/Huggy/Huggy-1999992.onnx",
                "reward": 3.849137510381528,
                "creation_time": 1739809275.2461488,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1999992.pt"
                ]
            },
            {
                "steps": 2000073,
                "file_path": "results/Huggy2/Huggy/Huggy-2000073.onnx",
                "reward": 3.8613419404736273,
                "creation_time": 1739809275.4182441,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-2000073.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000073,
            "file_path": "results/Huggy2/Huggy.onnx",
            "reward": 3.8613419404736273,
            "creation_time": 1739809275.4182441,
            "auxillary_file_paths": [
                "results/Huggy2/Huggy/Huggy-2000073.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "1.2.0.dev0",
        "torch_version": "2.6.0+cu124"
    }
}
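For reference, a minimal sketch of how this checkpoint history can be inspected with Python's standard library. The local file path "training_status.json" and the printed reward-curve format are assumptions for illustration, not part of ML-Agents itself; the keys used ("checkpoints", "steps", "reward", "file_path", "final_checkpoint", and the misspelled "auxillary_file_paths") are exactly those in the file above.

import json

# Load the ML-Agents training status file (local path is an assumption).
with open("training_status.json") as f:
    status = json.load(f)

# Each checkpoint records the step count, the exported .onnx policy,
# the mean reward at save time, and a Unix creation timestamp.
for ckpt in status["Huggy"]["checkpoints"]:
    print(f'step {ckpt["steps"]:>8}: mean reward {ckpt["reward"]:.3f}')

final = status["Huggy"]["final_checkpoint"]
print(f'final model: {final["file_path"]} (reward {final["reward"]:.3f})')

Run against the data above, this would show the reward rising from about 2.16 at step 199903 to roughly 3.86 at the final checkpoint (step 2000073), the model exported as results/Huggy2/Huggy.onnx.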