{
"Huggy": {
"checkpoints": [
{
"steps": 199740,
"file_path": "results/Huggy/Huggy/Huggy-199740.onnx",
"reward": 3.2166160866618156,
"creation_time": 1700338822.1261632,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199740.pt"
]
},
{
"steps": 399932,
"file_path": "results/Huggy/Huggy/Huggy-399932.onnx",
"reward": 3.994652034943564,
"creation_time": 1700339057.13681,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399932.pt"
]
},
{
"steps": 599937,
"file_path": "results/Huggy/Huggy/Huggy-599937.onnx",
"reward": 5.199607244559696,
"creation_time": 1700339294.2282345,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599937.pt"
]
},
{
"steps": 799979,
"file_path": "results/Huggy/Huggy/Huggy-799979.onnx",
"reward": 3.8154952282255348,
"creation_time": 1700339533.4235332,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799979.pt"
]
},
{
"steps": 999846,
"file_path": "results/Huggy/Huggy/Huggy-999846.onnx",
"reward": 3.8193339574946106,
"creation_time": 1700339776.9504032,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999846.pt"
]
},
{
"steps": 1199963,
"file_path": "results/Huggy/Huggy/Huggy-1199963.onnx",
"reward": 4.0285941134480865,
"creation_time": 1700340023.4733603,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199963.pt"
]
},
{
"steps": 1399996,
"file_path": "results/Huggy/Huggy/Huggy-1399996.onnx",
"reward": 3.9556598062398005,
"creation_time": 1700340266.190634,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399996.pt"
]
},
{
"steps": 1599990,
"file_path": "results/Huggy/Huggy/Huggy-1599990.onnx",
"reward": 3.841392871676659,
"creation_time": 1700340510.9031684,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599990.pt"
]
},
{
"steps": 1799664,
"file_path": "results/Huggy/Huggy/Huggy-1799664.onnx",
"reward": 3.6544476780322714,
"creation_time": 1700340759.2677255,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799664.pt"
]
},
{
"steps": 1999983,
"file_path": "results/Huggy/Huggy/Huggy-1999983.onnx",
"reward": 3.7444230034947394,
"creation_time": 1700341000.984011,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999983.pt"
]
},
{
"steps": 2000087,
"file_path": "results/Huggy/Huggy/Huggy-2000087.onnx",
"reward": 3.818763216821159,
"creation_time": 1700341001.0863807,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000087.pt"
]
}
],
"final_checkpoint": {
"steps": 2000087,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.818763216821159,
"creation_time": 1700341001.0863807,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000087.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.0+cu118"
}
}
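A quick way to inspect this file is to load it with the standard `json` module and scan the `checkpoints` list. The sketch below is a minimal example, assuming the file sits at `run_logs/training_status.json` relative to your working directory (adjust the path to your checkout); it only reads the keys shown above (`steps`, `reward`, `file_path`) and is not part of the ML-Agents API.

```python
import json

# Assumed location of this file; change the path if your layout differs.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

checkpoints = status["Huggy"]["checkpoints"]

# List each checkpoint's step count, recorded reward, and ONNX path.
for ckpt in checkpoints:
    print(f"{ckpt['steps']:>9} steps  reward {ckpt['reward']:.3f}  -> {ckpt['file_path']}")

# Checkpoint with the highest recorded reward
# (in this run, the 599937-step checkpoint at roughly 5.20).
best = max(checkpoints, key=lambda c: c["reward"])
print("best:", best["file_path"], best["reward"])
```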