ppo-Huggy/run_logs/training_status.json
{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199792,
        "file_path": "results/Huggy/Huggy/Huggy-199792.onnx",
        "reward": 3.754995104719381,
        "creation_time": 1674042043.8783813,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199792.pt"
        ]
      },
      {
        "steps": 399443,
        "file_path": "results/Huggy/Huggy/Huggy-399443.onnx",
        "reward": 3.704182703648844,
        "creation_time": 1674042253.6023955,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399443.pt"
        ]
      },
      {
        "steps": 599960,
        "file_path": "results/Huggy/Huggy/Huggy-599960.onnx",
        "reward": 4.5305974666888895,
        "creation_time": 1674042471.9123058,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599960.pt"
        ]
      },
      {
        "steps": 799889,
        "file_path": "results/Huggy/Huggy/Huggy-799889.onnx",
        "reward": 4.032345700379155,
        "creation_time": 1674042693.7948,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799889.pt"
        ]
      },
      {
        "steps": 999890,
        "file_path": "results/Huggy/Huggy/Huggy-999890.onnx",
        "reward": 3.8808024385607385,
        "creation_time": 1674042917.2871163,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999890.pt"
        ]
      },
      {
        "steps": 1199942,
        "file_path": "results/Huggy/Huggy/Huggy-1199942.onnx",
        "reward": 4.22659210501046,
        "creation_time": 1674043139.0818977,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199942.pt"
        ]
      },
      {
        "steps": 1399980,
        "file_path": "results/Huggy/Huggy/Huggy-1399980.onnx",
        "reward": 4.307024018331007,
        "creation_time": 1674043364.2942336,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399980.pt"
        ]
      },
      {
        "steps": 1599973,
        "file_path": "results/Huggy/Huggy/Huggy-1599973.onnx",
        "reward": 3.6168893108415845,
        "creation_time": 1674043589.6958926,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599973.pt"
        ]
      },
      {
        "steps": 1799987,
        "file_path": "results/Huggy/Huggy/Huggy-1799987.onnx",
        "reward": 3.872917072704205,
        "creation_time": 1674043811.8442364,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799987.pt"
        ]
      },
      {
        "steps": 1999936,
        "file_path": "results/Huggy/Huggy/Huggy-1999936.onnx",
        "reward": 3.72837704878587,
        "creation_time": 1674044031.2085762,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999936.pt"
        ]
      },
      {
        "steps": 2000054,
        "file_path": "results/Huggy/Huggy/Huggy-2000054.onnx",
        "reward": 3.7981815695762635,
        "creation_time": 1674044031.321378,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000054.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000054,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 3.7981815695762635,
      "creation_time": 1674044031.321378,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000054.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "0.29.0.dev0",
    "torch_version": "1.8.1+cu102"
  }
}
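
The snippet below is a minimal, illustrative sketch (not part of this repo) of how a log like this could be read back, assuming the file is saved as run_logs/training_status.json relative to the working directory. The behavior name "Huggy" and the key names mirror the JSON above, including ML-Agents' original "auxillary_file_paths" spelling; everything else (the path constant, the printed summary) is a hypothetical usage example.

import json

# Hypothetical location of the run log shown above; adjust to your layout.
STATUS_PATH = "run_logs/training_status.json"

with open(STATUS_PATH) as f:
    status = json.load(f)

huggy = status["Huggy"]

# Each checkpoint entry records the exported .onnx policy, its mean reward,
# a Unix creation timestamp, and the matching .pt weights under
# "auxillary_file_paths" (spelled that way by ML-Agents itself).
checkpoints = huggy["checkpoints"]
best = max(checkpoints, key=lambda c: c["reward"])
print(f"best checkpoint: {best['file_path']} "
      f"(reward {best['reward']:.3f} at step {best['steps']})")

# The final checkpoint is the policy exported as results/Huggy/Huggy.onnx.
final = huggy["final_checkpoint"]
print(f"final checkpoint: {final['file_path']} (reward {final['reward']:.3f})")

For this particular run, the script would report the 599960-step checkpoint as the highest-reward one (reward ~4.53), while the final exported policy corresponds to step 2000054.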