{
"Huggy": {
"checkpoints": [
{
"steps": 199900,
"file_path": "results/Huggy2/Huggy/Huggy-199900.onnx",
"reward": 3.099206975528172,
"creation_time": 1734787608.4228384,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199900.pt"
]
},
{
"steps": 399770,
"file_path": "results/Huggy2/Huggy/Huggy-399770.onnx",
"reward": 4.060144780079524,
"creation_time": 1734787859.3509731,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399770.pt"
]
},
{
"steps": 599980,
"file_path": "results/Huggy2/Huggy/Huggy-599980.onnx",
"reward": 4.165395021438599,
"creation_time": 1734788110.7426133,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599980.pt"
]
},
{
"steps": 799890,
"file_path": "results/Huggy2/Huggy/Huggy-799890.onnx",
"reward": 3.740216094797308,
"creation_time": 1734788357.727559,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799890.pt"
]
},
{
"steps": 999661,
"file_path": "results/Huggy2/Huggy/Huggy-999661.onnx",
"reward": 3.782866870729547,
"creation_time": 1734788619.0054934,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999661.pt"
]
},
{
"steps": 1199995,
"file_path": "results/Huggy2/Huggy/Huggy-1199995.onnx",
"reward": 3.843800645807515,
"creation_time": 1734788877.547304,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199995.pt"
]
},
{
"steps": 1399950,
"file_path": "results/Huggy2/Huggy/Huggy-1399950.onnx",
"reward": 3.695393439909307,
"creation_time": 1734789137.1625776,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399950.pt"
]
},
{
"steps": 1599958,
"file_path": "results/Huggy2/Huggy/Huggy-1599958.onnx",
"reward": 3.627192144884783,
"creation_time": 1734789405.0924618,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599958.pt"
]
},
{
"steps": 1799988,
"file_path": "results/Huggy2/Huggy/Huggy-1799988.onnx",
"reward": 3.545337485350095,
"creation_time": 1734789669.9109583,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799988.pt"
]
},
{
"steps": 1999664,
"file_path": "results/Huggy2/Huggy/Huggy-1999664.onnx",
"reward": 3.9276025815707882,
"creation_time": 1734789929.2956452,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999664.pt"
]
},
{
"steps": 2000414,
"file_path": "results/Huggy2/Huggy/Huggy-2000414.onnx",
"reward": 3.8948981157205638,
"creation_time": 1734789929.4515655,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000414.pt"
]
}
],
"final_checkpoint": {
"steps": 2000414,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.8948981157205638,
"creation_time": 1734789929.4515655,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000414.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.5.1+cu121"
}
}