ppo-Huggy/run_logs/training_status.json
{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199808,
        "file_path": "results/Huggy2/Huggy/Huggy-199808.onnx",
        "reward": 3.410765748023987,
        "creation_time": 1714654384.4499607,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-199808.pt"
        ]
      },
      {
        "steps": 399936,
        "file_path": "results/Huggy2/Huggy/Huggy-399936.onnx",
        "reward": 3.582409833392052,
        "creation_time": 1714654626.159831,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-399936.pt"
        ]
      },
      {
        "steps": 599842,
        "file_path": "results/Huggy2/Huggy/Huggy-599842.onnx",
        "reward": 3.824527621269226,
        "creation_time": 1714654872.2781315,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-599842.pt"
        ]
      },
      {
        "steps": 799911,
        "file_path": "results/Huggy2/Huggy/Huggy-799911.onnx",
        "reward": 3.998348655303319,
        "creation_time": 1714655116.4042552,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-799911.pt"
        ]
      },
      {
        "steps": 999907,
        "file_path": "results/Huggy2/Huggy/Huggy-999907.onnx",
        "reward": 3.8996231284057883,
        "creation_time": 1714655363.2233899,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-999907.pt"
        ]
      },
      {
        "steps": 1199892,
        "file_path": "results/Huggy2/Huggy/Huggy-1199892.onnx",
        "reward": 3.399199937519274,
        "creation_time": 1714655607.188014,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1199892.pt"
        ]
      },
      {
        "steps": 1399933,
        "file_path": "results/Huggy2/Huggy/Huggy-1399933.onnx",
        "reward": 3.096571445465088,
        "creation_time": 1714655866.4131124,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1399933.pt"
        ]
      },
      {
        "steps": 1599920,
        "file_path": "results/Huggy2/Huggy/Huggy-1599920.onnx",
        "reward": 3.6890127533747825,
        "creation_time": 1714656112.9243164,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1599920.pt"
        ]
      },
      {
        "steps": 1799795,
        "file_path": "results/Huggy2/Huggy/Huggy-1799795.onnx",
        "reward": 3.805698135024623,
        "creation_time": 1714656361.044344,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1799795.pt"
        ]
      },
      {
        "steps": 1999952,
        "file_path": "results/Huggy2/Huggy/Huggy-1999952.onnx",
        "reward": 3.6845036434090654,
        "creation_time": 1714656605.6083162,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1999952.pt"
        ]
      },
      {
        "steps": 2000022,
        "file_path": "results/Huggy2/Huggy/Huggy-2000022.onnx",
        "reward": 3.6375597566366196,
        "creation_time": 1714656605.7387514,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-2000022.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000022,
      "file_path": "results/Huggy2/Huggy.onnx",
      "reward": 3.6375597566366196,
      "creation_time": 1714656605.7387514,
      "auxillary_file_paths": [
        "results/Huggy2/Huggy/Huggy-2000022.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.1.0.dev0",
    "torch_version": "2.2.1+cu121"
  }
}
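
For reference, the log above is plain JSON and can be summarized with a few lines of Python. This is a minimal sketch, not part of the repo: it assumes the file sits at run_logs/training_status.json relative to the working directory and uses only keys present in the file ("checkpoints", "final_checkpoint", "steps", "file_path", "reward").

```python
import json

# Minimal sketch: load ML-Agents' training_status.json and summarize
# the checkpoints recorded above. The path and the behavior name
# ("Huggy") are taken from this file; adjust them for other runs.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

checkpoints = status["Huggy"]["checkpoints"]

# Highest-reward checkpoint among the periodic snapshots.
best = max(checkpoints, key=lambda c: c["reward"])
print(f"best: {best['file_path']} at step {best['steps']} (reward {best['reward']:.3f})")

# The exported policy recorded as the final artifact of the run.
final = status["Huggy"]["final_checkpoint"]
print(f"final: {final['file_path']} (reward {final['reward']:.3f})")
```

On this log, the sketch would report Huggy-799911.onnx (reward ~3.998) as the best periodic snapshot, while the final exported policy is results/Huggy2/Huggy.onnx from step 2000022.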