{
"Huggy": {
"checkpoints": [
{
"steps": 199954,
"file_path": "results/Huggy/Huggy/Huggy-199954.onnx",
"reward": 3.615296757410443,
"creation_time": 1679548878.7532868,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199954.pt"
]
},
{
"steps": 399851,
"file_path": "results/Huggy/Huggy/Huggy-399851.onnx",
"reward": 3.6156066235374,
"creation_time": 1679549108.656894,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399851.pt"
]
},
{
"steps": 599963,
"file_path": "results/Huggy/Huggy/Huggy-599963.onnx",
"reward": 3.710899233818054,
"creation_time": 1679549341.3161702,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599963.pt"
]
},
{
"steps": 799323,
"file_path": "results/Huggy/Huggy/Huggy-799323.onnx",
"reward": 3.6831069695949554,
"creation_time": 1679549570.2563694,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799323.pt"
]
},
{
"steps": 999917,
"file_path": "results/Huggy/Huggy/Huggy-999917.onnx",
"reward": 3.6357675520311883,
"creation_time": 1679549804.1792037,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999917.pt"
]
},
{
"steps": 1199922,
"file_path": "results/Huggy/Huggy/Huggy-1199922.onnx",
"reward": 3.3054558783769608,
"creation_time": 1679550037.0108907,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199922.pt"
]
},
{
"steps": 1399968,
"file_path": "results/Huggy/Huggy/Huggy-1399968.onnx",
"reward": 3.6087507373928407,
"creation_time": 1679550266.287225,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399968.pt"
]
},
{
"steps": 1599909,
"file_path": "results/Huggy/Huggy/Huggy-1599909.onnx",
"reward": 3.707769066095352,
"creation_time": 1679550498.5267308,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599909.pt"
]
},
{
"steps": 1799993,
"file_path": "results/Huggy/Huggy/Huggy-1799993.onnx",
"reward": 3.6530143320560455,
"creation_time": 1679550731.1717372,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799993.pt"
]
},
{
"steps": 1999918,
"file_path": "results/Huggy/Huggy/Huggy-1999918.onnx",
"reward": 3.8727016150951385,
"creation_time": 1679550963.7672498,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999918.pt"
]
},
{
"steps": 2000034,
"file_path": "results/Huggy/Huggy/Huggy-2000034.onnx",
"reward": 3.9515540656589327,
"creation_time": 1679550963.957265,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000034.pt"
]
}
],
"final_checkpoint": {
"steps": 2000034,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.9515540656589327,
"creation_time": 1679550963.957265,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000034.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}