{
"Huggy": {
"checkpoints": [
{
"steps": 199715,
"file_path": "results/Huggy2/Huggy/Huggy-199715.onnx",
"reward": 3.9608242663741113,
"creation_time": 1740549922.8447132,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199715.pt"
]
},
{
"steps": 399612,
"file_path": "results/Huggy2/Huggy/Huggy-399612.onnx",
"reward": 3.767807410708789,
"creation_time": 1740550176.4137902,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399612.pt"
]
},
{
"steps": 599998,
"file_path": "results/Huggy2/Huggy/Huggy-599998.onnx",
"reward": 3.8016722482793472,
"creation_time": 1740550433.3315814,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599998.pt"
]
},
{
"steps": 799972,
"file_path": "results/Huggy2/Huggy/Huggy-799972.onnx",
"reward": 3.522121361428243,
"creation_time": 1740550681.193421,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799972.pt"
]
},
{
"steps": 999932,
"file_path": "results/Huggy2/Huggy/Huggy-999932.onnx",
"reward": 3.698152403648083,
"creation_time": 1740550936.9129221,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999932.pt"
]
},
{
"steps": 1199935,
"file_path": "results/Huggy2/Huggy/Huggy-1199935.onnx",
"reward": 3.755337103324778,
"creation_time": 1740551188.7046142,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199935.pt"
]
},
{
"steps": 1399985,
"file_path": "results/Huggy2/Huggy/Huggy-1399985.onnx",
"reward": 3.57205793261528,
"creation_time": 1740551432.5082612,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399985.pt"
]
},
{
"steps": 1599273,
"file_path": "results/Huggy2/Huggy/Huggy-1599273.onnx",
"reward": 3.6585230579850267,
"creation_time": 1740551673.9167123,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599273.pt"
]
},
{
"steps": 1799839,
"file_path": "results/Huggy2/Huggy/Huggy-1799839.onnx",
"reward": 3.732029253108935,
"creation_time": 1740551926.2802327,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799839.pt"
]
},
{
"steps": 1999956,
"file_path": "results/Huggy2/Huggy/Huggy-1999956.onnx",
"reward": 4.5668374403663305,
"creation_time": 1740552176.970758,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999956.pt"
]
},
{
"steps": 2000072,
"file_path": "results/Huggy2/Huggy/Huggy-2000072.onnx",
"reward": 4.613952681422234,
"creation_time": 1740552177.096416,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000072.pt"
]
}
],
"final_checkpoint": {
"steps": 2000072,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.613952681422234,
"creation_time": 1740552177.096416,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000072.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}