{
"Huggy": {
"checkpoints": [
{
"steps": 199851,
"file_path": "results/Huggy/Huggy/Huggy-199851.onnx",
"reward": 3.102358141187894,
"creation_time": 1679652888.4636345,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199851.pt"
]
},
{
"steps": 399934,
"file_path": "results/Huggy/Huggy/Huggy-399934.onnx",
"reward": 3.7080676095527516,
"creation_time": 1679653097.7598844,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399934.pt"
]
},
{
"steps": 599930,
"file_path": "results/Huggy/Huggy/Huggy-599930.onnx",
"reward": 3.9260609290179085,
"creation_time": 1679653309.6091273,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599930.pt"
]
},
{
"steps": 799985,
"file_path": "results/Huggy/Huggy/Huggy-799985.onnx",
"reward": 3.737696355961739,
"creation_time": 1679653520.3847492,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799985.pt"
]
},
{
"steps": 999931,
"file_path": "results/Huggy/Huggy/Huggy-999931.onnx",
"reward": 3.768773115072094,
"creation_time": 1679653735.8146522,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999931.pt"
]
},
{
"steps": 1199934,
"file_path": "results/Huggy/Huggy/Huggy-1199934.onnx",
"reward": 3.925837509907209,
"creation_time": 1679653950.2190115,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199934.pt"
]
},
{
"steps": 1399988,
"file_path": "results/Huggy/Huggy/Huggy-1399988.onnx",
"reward": 3.866460906277443,
"creation_time": 1679654163.0134199,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399988.pt"
]
},
{
"steps": 1599860,
"file_path": "results/Huggy/Huggy/Huggy-1599860.onnx",
"reward": 3.9423277813397095,
"creation_time": 1679654380.4310193,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599860.pt"
]
},
{
"steps": 1799962,
"file_path": "results/Huggy/Huggy/Huggy-1799962.onnx",
"reward": 3.6766849851186296,
"creation_time": 1679654596.8347943,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799962.pt"
]
},
{
"steps": 1999944,
"file_path": "results/Huggy/Huggy/Huggy-1999944.onnx",
"reward": 3.8895927202410814,
"creation_time": 1679654814.0059566,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999944.pt"
]
},
{
"steps": 2000002,
"file_path": "results/Huggy/Huggy/Huggy-2000002.onnx",
"reward": 3.869506543590909,
"creation_time": 1679654814.1272287,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000002.pt"
]
}
],
"final_checkpoint": {
"steps": 2000002,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.869506543590909,
"creation_time": 1679654814.1272287,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000002.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}