{
"Huggy": {
"checkpoints": [
{
"steps": 199969,
"file_path": "results/Huggy2/Huggy/Huggy-199969.onnx",
"reward": 3.4992238304831766,
"creation_time": 1719088666.3473754,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199969.pt"
]
},
{
"steps": 399952,
"file_path": "results/Huggy2/Huggy/Huggy-399952.onnx",
"reward": 3.4146444126963615,
"creation_time": 1719088927.523908,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399952.pt"
]
},
{
"steps": 599633,
"file_path": "results/Huggy2/Huggy/Huggy-599633.onnx",
"reward": 4.062182386716207,
"creation_time": 1719089194.649979,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599633.pt"
]
},
{
"steps": 799917,
"file_path": "results/Huggy2/Huggy/Huggy-799917.onnx",
"reward": 3.857176386063395,
"creation_time": 1719089458.2291875,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799917.pt"
]
},
{
"steps": 999739,
"file_path": "results/Huggy2/Huggy/Huggy-999739.onnx",
"reward": 3.726090683155701,
"creation_time": 1719089726.9670005,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999739.pt"
]
},
{
"steps": 1199953,
"file_path": "results/Huggy2/Huggy/Huggy-1199953.onnx",
"reward": 3.9759466861778834,
"creation_time": 1719089994.77615,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199953.pt"
]
},
{
"steps": 1399984,
"file_path": "results/Huggy2/Huggy/Huggy-1399984.onnx",
"reward": 3.809895778111383,
"creation_time": 1719090260.867843,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399984.pt"
]
},
{
"steps": 1599963,
"file_path": "results/Huggy2/Huggy/Huggy-1599963.onnx",
"reward": 3.682235802397316,
"creation_time": 1719090529.9116464,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599963.pt"
]
},
{
"steps": 1799889,
"file_path": "results/Huggy2/Huggy/Huggy-1799889.onnx",
"reward": 3.6941803510372457,
"creation_time": 1719090799.3998044,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799889.pt"
]
},
{
"steps": 1999967,
"file_path": "results/Huggy2/Huggy/Huggy-1999967.onnx",
"reward": 3.790853125708444,
"creation_time": 1719091068.7540002,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999967.pt"
]
},
{
"steps": 2000020,
"file_path": "results/Huggy2/Huggy/Huggy-2000020.onnx",
"reward": 3.7962041876532813,
"creation_time": 1719091068.9305832,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000020.pt"
]
}
],
"final_checkpoint": {
"steps": 2000020,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.7962041876532813,
"creation_time": 1719091068.9305832,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000020.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.0+cu121"
}
}