ppo-Huggy/run_logs/training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 199886,
"file_path": "results/Huggy/Huggy/Huggy-199886.onnx",
"reward": 3.610569397608439,
"creation_time": 1678271024.3603053,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199886.pt"
]
},
{
"steps": 399946,
"file_path": "results/Huggy/Huggy/Huggy-399946.onnx",
"reward": 3.780143035252889,
"creation_time": 1678271277.390215,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399946.pt"
]
},
{
"steps": 599902,
"file_path": "results/Huggy/Huggy/Huggy-599902.onnx",
"reward": 4.0081637716293335,
"creation_time": 1678271544.6037395,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599902.pt"
]
},
{
"steps": 799919,
"file_path": "results/Huggy/Huggy/Huggy-799919.onnx",
"reward": 3.7504406443664005,
"creation_time": 1678271801.6623018,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799919.pt"
]
},
{
"steps": 999989,
"file_path": "results/Huggy/Huggy/Huggy-999989.onnx",
"reward": 3.5772487359835687,
"creation_time": 1678272058.4986734,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999989.pt"
]
},
{
"steps": 1199378,
"file_path": "results/Huggy/Huggy/Huggy-1199378.onnx",
"reward": 3.428451211635883,
"creation_time": 1678272314.0078878,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199378.pt"
]
},
{
"steps": 1399367,
"file_path": "results/Huggy/Huggy/Huggy-1399367.onnx",
"reward": 4.234970728556315,
"creation_time": 1678272570.754984,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399367.pt"
]
},
{
"steps": 1599624,
"file_path": "results/Huggy/Huggy/Huggy-1599624.onnx",
"reward": 3.8433212710149363,
"creation_time": 1678272830.548677,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599624.pt"
]
},
{
"steps": 1799943,
"file_path": "results/Huggy/Huggy/Huggy-1799943.onnx",
"reward": 3.731589797410098,
"creation_time": 1678273091.6835353,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799943.pt"
]
},
{
"steps": 1999980,
"file_path": "results/Huggy/Huggy/Huggy-1999980.onnx",
"reward": 3.7603126229911017,
"creation_time": 1678273349.8974845,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999980.pt"
]
},
{
"steps": 2000051,
"file_path": "results/Huggy/Huggy/Huggy-2000051.onnx",
"reward": 3.7123398462931316,
"creation_time": 1678273350.01598,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000051.pt"
]
}
],
"final_checkpoint": {
"steps": 2000051,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.7123398462931316,
"creation_time": 1678273350.01598,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000051.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}
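
For reference, the records above can be inspected programmatically. Below is a minimal sketch, assuming the file has been downloaded locally as `training_status.json` (the local filename is an assumption; on the Hub it lives at `run_logs/training_status.json`). The `"Huggy"` top-level key and the `steps`, `reward`, and `file_path` fields come directly from this file.

```python
import json

# Assumed local path; on the Hub this file is run_logs/training_status.json.
with open("training_status.json") as f:
    status = json.load(f)

# "Huggy" is the behavior name used as the top-level key in this run.
checkpoints = status["Huggy"]["checkpoints"]

for ckpt in checkpoints:
    # Each entry records the step count, mean reward, and exported .onnx path.
    print(f'{ckpt["steps"]:>9} steps  reward={ckpt["reward"]:.3f}  {ckpt["file_path"]}')

# Checkpoint with the highest recorded mean reward in this run
# (here, the 1,399,367-step checkpoint at ~4.235).
best = max(checkpoints, key=lambda c: c["reward"])
print(f'Best: {best["steps"]} steps, reward {best["reward"]:.3f}')
```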