{
"Huggy": {
"checkpoints": [
{
"steps": 199922,
"file_path": "results/Huggy/Huggy/Huggy-199922.onnx",
"reward": 3.1766347957068475,
"creation_time": 1687604173.336835,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199922.pt"
]
},
{
"steps": 399853,
"file_path": "results/Huggy/Huggy/Huggy-399853.onnx",
"reward": 3.546895463415917,
"creation_time": 1687604412.2609398,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399853.pt"
]
},
{
"steps": 599677,
"file_path": "results/Huggy/Huggy/Huggy-599677.onnx",
"reward": 3.711298495531082,
"creation_time": 1687604645.42775,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599677.pt"
]
},
{
"steps": 799923,
"file_path": "results/Huggy/Huggy/Huggy-799923.onnx",
"reward": 3.9375846269654065,
"creation_time": 1687604891.27807,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799923.pt"
]
},
{
"steps": 999989,
"file_path": "results/Huggy/Huggy/Huggy-999989.onnx",
"reward": 3.5249368802954755,
"creation_time": 1687605136.1517735,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999989.pt"
]
},
{
"steps": 1199917,
"file_path": "results/Huggy/Huggy/Huggy-1199917.onnx",
"reward": 3.501825930441127,
"creation_time": 1687605371.2740347,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199917.pt"
]
},
{
"steps": 1399335,
"file_path": "results/Huggy/Huggy/Huggy-1399335.onnx",
"reward": 3.7793275658658008,
"creation_time": 1687605595.04137,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399335.pt"
]
},
{
"steps": 1599982,
"file_path": "results/Huggy/Huggy/Huggy-1599982.onnx",
"reward": 3.6086598790965034,
"creation_time": 1687605852.9751089,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599982.pt"
]
},
{
"steps": 1799403,
"file_path": "results/Huggy/Huggy/Huggy-1799403.onnx",
"reward": 3.134282083615013,
"creation_time": 1687606096.618556,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799403.pt"
]
},
{
"steps": 1999499,
"file_path": "results/Huggy/Huggy/Huggy-1999499.onnx",
"reward": 3.500092261715939,
"creation_time": 1687606334.5242038,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999499.pt"
]
},
{
"steps": 2000249,
"file_path": "results/Huggy/Huggy/Huggy-2000249.onnx",
"reward": 3.4586035597558116,
"creation_time": 1687606334.7959223,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000249.pt"
]
}
],
"final_checkpoint": {
"steps": 2000249,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.4586035597558116,
"creation_time": 1687606334.7959223,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000249.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}