poca-SoccerTwos / run_logs / timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.9135105609893799,
"min": 1.8966606855392456,
"max": 3.295762777328491,
"count": 500
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 37351.7265625,
"min": 28621.5078125,
"max": 130955.078125,
"count": 500
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 51.4375,
"min": 39.604838709677416,
"max": 999.0,
"count": 500
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19752.0,
"min": 10652.0,
"max": 30012.0,
"count": 500
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1487.5214108661778,
"min": 1191.3766706051888,
"max": 1489.7959330171645,
"count": 496
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 285604.11088630615,
"min": 2390.072751685243,
"max": 361218.1607592035,
"count": 496
},
"SoccerTwos.Step.mean": {
"value": 4999964.0,
"min": 9022.0,
"max": 4999964.0,
"count": 500
},
"SoccerTwos.Step.sum": {
"value": 4999964.0,
"min": 9022.0,
"max": 4999964.0,
"count": 500
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": 0.04535470902919769,
"min": -0.11993592232465744,
"max": 0.19004863500595093,
"count": 500
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": 8.662749290466309,
"min": -23.748119354248047,
"max": 27.715343475341797,
"count": 500
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.04250567778944969,
"min": -0.1142832413315773,
"max": 0.19538632035255432,
"count": 500
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": 8.118584632873535,
"min": -23.947742462158203,
"max": 27.989952087402344,
"count": 500
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 500
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 500
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.022307857168906646,
"min": -0.6929510211458012,
"max": 0.46573613371167866,
"count": 500
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -4.260800719261169,
"min": -63.485599994659424,
"max": 55.42259991168976,
"count": 500
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.022307857168906646,
"min": -0.6929510211458012,
"max": 0.46573613371167866,
"count": 500
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -4.260800719261169,
"min": -63.485599994659424,
"max": 55.42259991168976,
"count": 500
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 500
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 500
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.016151578600207963,
"min": 0.011049103082041257,
"max": 0.0235756105791855,
"count": 240
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.016151578600207963,
"min": 0.011049103082041257,
"max": 0.0235756105791855,
"count": 240
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.10882534260551134,
"min": 0.0009062568754112969,
"max": 0.12459699138998985,
"count": 240
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.10882534260551134,
"min": 0.0009062568754112969,
"max": 0.12459699138998985,
"count": 240
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.1109501118461291,
"min": 0.0009173607521612818,
"max": 0.12684522792696953,
"count": 240
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.1109501118461291,
"min": 0.0009173607521612818,
"max": 0.12684522792696953,
"count": 240
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 240
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 240
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000004,
"max": 0.20000000000000007,
"count": 240
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000004,
"max": 0.20000000000000007,
"count": 240
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 240
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 240
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1677071778",
"python_version": "3.9.16 (main, Jan 11 2023, 16:16:36) [MSC v.1916 64 bit (AMD64)]",
"command_line_arguments": "C:\\Users\\emanu\\miniconda3\\envs\\rl\\Scripts\\mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos/SoccerTwos.exe --run-id=poca-SoccerTwos --no-graphics",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.13.1+cu116",
"numpy_version": "1.21.2",
"end_time_seconds": "1677077594"
},
"total": 5815.6003114000005,
"count": 1,
"self": 0.2012739000010697,
"children": {
"run_training.setup": {
"total": 0.07008039999999993,
"count": 1,
"self": 0.07008039999999993
},
"TrainerController.start_learning": {
"total": 5815.3289571,
"count": 1,
"self": 4.1021847000583875,
"children": {
"TrainerController._reset_env": {
"total": 3.2602931999977818,
"count": 25,
"self": 3.2602931999977818
},
"TrainerController.advance": {
"total": 5807.8198382999435,
"count": 343126,
"self": 3.8975711998018596,
"children": {
"env_step": {
"total": 4052.742351500123,
"count": 343126,
"self": 2462.6775483003303,
"children": {
"SubprocessEnvManager._take_step": {
"total": 1587.5271079999739,
"count": 343126,
"self": 27.31333789962173,
"children": {
"TorchPolicy.evaluate": {
"total": 1560.2137701003521,
"count": 632066,
"self": 1560.2137701003521
}
}
},
"workers": {
"total": 2.537695199818991,
"count": 343126,
"self": 0.0,
"children": {
"worker_root": {
"total": 5807.592962700092,
"count": 343126,
"is_parallel": true,
"self": 3826.715853800153,
"children": {
"steps_from_proto": {
"total": 0.029419499998849297,
"count": 50,
"is_parallel": true,
"self": 0.005730799995375291,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.023688700003474006,
"count": 200,
"is_parallel": true,
"self": 0.023688700003474006
}
}
},
"UnityEnvironment.step": {
"total": 1980.8476893999402,
"count": 343126,
"is_parallel": true,
"self": 122.36145349993035,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 101.9879514003275,
"count": 343126,
"is_parallel": true,
"self": 101.9879514003275
},
"communicator.exchange": {
"total": 1399.4482094999576,
"count": 343126,
"is_parallel": true,
"self": 1399.4482094999576
},
"steps_from_proto": {
"total": 357.05007499972476,
"count": 686252,
"is_parallel": true,
"self": 68.74486049933853,
"children": {
"_process_rank_one_or_two_observation": {
"total": 288.3052145003862,
"count": 2745008,
"is_parallel": true,
"self": 288.3052145003862
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 1751.1799156000186,
"count": 343126,
"self": 33.28170960003922,
"children": {
"process_trajectory": {
"total": 728.353284099981,
"count": 343126,
"self": 727.0889033999805,
"children": {
"RLTrainer._checkpoint": {
"total": 1.2643807000005154,
"count": 10,
"self": 1.2643807000005154
}
}
},
"_update_policy": {
"total": 989.5449218999983,
"count": 240,
"self": 552.6703163000046,
"children": {
"TorchPOCAOptimizer.update": {
"total": 436.87460559999363,
"count": 7206,
"self": 436.87460559999363
}
}
}
}
}
}
},
"trainer_threads": {
"total": 6.000000212225132e-07,
"count": 1,
"self": 6.000000212225132e-07
},
"TrainerController._save_models": {
"total": 0.1466402999994898,
"count": 1,
"self": 0.024768599999333674,
"children": {
"RLTrainer._checkpoint": {
"total": 0.12187170000015612,
"count": 1,
"self": 0.12187170000015612
}
}
}
}
}
}
}