PPO-Pyramids/run_logs/timers.json
{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.2420235425233841,
"min": 0.2252901792526245,
"max": 1.3850210905075073,
"count": 50
},
"Pyramids.Policy.Entropy.sum": {
"value": 7225.85498046875,
"min": 6701.03125,
"max": 42016.0,
"count": 50
},
"Pyramids.Step.mean": {
"value": 1499927.0,
"min": 29952.0,
"max": 1499927.0,
"count": 50
},
"Pyramids.Step.sum": {
"value": 1499927.0,
"min": 29952.0,
"max": 1499927.0,
"count": 50
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.699270486831665,
"min": -0.09082455188035965,
"max": 0.790700376033783,
"count": 50
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 203.4877166748047,
"min": -21.888717651367188,
"max": 239.58221435546875,
"count": 50
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.007752297446131706,
"min": -0.0003571129927877337,
"max": 0.29707929491996765,
"count": 50
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 2.255918502807617,
"min": -0.1046341061592102,
"max": 71.59610748291016,
"count": 50
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06614539291851024,
"min": 0.0647650169185683,
"max": 0.07733903615872025,
"count": 50
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9260355008591433,
"min": 0.49341082538330217,
"max": 1.0827465062220833,
"count": 50
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.015098810502552492,
"min": 0.0008368462105235164,
"max": 0.0180267707114884,
"count": 50
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.2113833470357349,
"min": 0.00920530831575868,
"max": 0.270401560672326,
"count": 50
},
"Pyramids.Policy.LearningRate.mean": {
"value": 0.00015144260666200476,
"min": 0.00015144260666200476,
"max": 0.00029838354339596195,
"count": 50
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.0021201964932680666,
"min": 0.0020886848037717336,
"max": 0.0040725322424893,
"count": 50
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.15048085238095238,
"min": 0.15048085238095238,
"max": 0.19946118095238097,
"count": 50
},
"Pyramids.Policy.Epsilon.sum": {
"value": 2.106731933333333,
"min": 1.3962282666666668,
"max": 2.8575107000000006,
"count": 50
},
"Pyramids.Policy.Beta.mean": {
"value": 0.005053037152857142,
"min": 0.005053037152857142,
"max": 0.009946171977142856,
"count": 50
},
"Pyramids.Policy.Beta.sum": {
"value": 0.07074252013999999,
"min": 0.06962320384,
"max": 0.13576531893,
"count": 50
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.009045292623341084,
"min": 0.009045292623341084,
"max": 0.5670269131660461,
"count": 50
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.12663409113883972,
"min": 0.12663409113883972,
"max": 3.9691884517669678,
"count": 50
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 265.01785714285717,
"min": 245.5609756097561,
"max": 999.0,
"count": 50
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 29682.0,
"min": 15984.0,
"max": 32406.0,
"count": 50
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.7171178497374058,
"min": -1.0000000521540642,
"max": 1.7347033734165005,
"count": 50
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 192.31719917058945,
"min": -29.74900161474943,
"max": 223.9947980493307,
"count": 50
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.7171178497374058,
"min": -1.0000000521540642,
"max": 1.7347033734165005,
"count": 50
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 192.31719917058945,
"min": -29.74900161474943,
"max": 223.9947980493307,
"count": 50
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.02488347915498577,
"min": 0.023911526658549356,
"max": 11.845616444945335,
"count": 50
},
"Pyramids.Policy.RndReward.sum": {
"value": 2.7869496653584065,
"min": 2.7869496653584065,
"max": 189.52986311912537,
"count": 50
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 50
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 50
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1731518937",
"python_version": "3.10.12 (main, Sep 11 2024, 15:47:36) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.2.0.dev0",
"mlagents_envs_version": "1.2.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.5.0+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1731524214"
},
"total": 5276.5313382879995,
"count": 1,
"self": 0.6404445269990902,
"children": {
"run_training.setup": {
"total": 0.0851747910000995,
"count": 1,
"self": 0.0851747910000995
},
"TrainerController.start_learning": {
"total": 5275.80571897,
"count": 1,
"self": 3.7140206357498755,
"children": {
"TrainerController._reset_env": {
"total": 2.7510618830001476,
"count": 1,
"self": 2.7510618830001476
},
"TrainerController.advance": {
"total": 5269.340263712249,
"count": 97207,
"self": 3.6612425241137316,
"children": {
"env_step": {
"total": 3638.2077083330587,
"count": 97207,
"self": 3389.888309416723,
"children": {
"SubprocessEnvManager._take_step": {
"total": 246.25262339826077,
"count": 97207,
"self": 10.138080488414744,
"children": {
"TorchPolicy.evaluate": {
"total": 236.11454290984602,
"count": 94009,
"self": 236.11454290984602
}
}
},
"workers": {
"total": 2.0667755180747918,
"count": 97206,
"self": 0.0,
"children": {
"worker_root": {
"total": 5264.728408751971,
"count": 97206,
"is_parallel": true,
"self": 2139.64836307901,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.002366988999710884,
"count": 1,
"is_parallel": true,
"self": 0.0007567939974251203,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0016101950022857636,
"count": 8,
"is_parallel": true,
"self": 0.0016101950022857636
}
}
},
"UnityEnvironment.step": {
"total": 0.060700216999975964,
"count": 1,
"is_parallel": true,
"self": 0.000774199000261433,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0005190749998291722,
"count": 1,
"is_parallel": true,
"self": 0.0005190749998291722
},
"communicator.exchange": {
"total": 0.057338566999533214,
"count": 1,
"is_parallel": true,
"self": 0.057338566999533214
},
"steps_from_proto": {
"total": 0.0020683760003521456,
"count": 1,
"is_parallel": true,
"self": 0.00043154400100320345,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0016368319993489422,
"count": 8,
"is_parallel": true,
"self": 0.0016368319993489422
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 3125.080045672961,
"count": 97205,
"is_parallel": true,
"self": 72.98485001781864,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 45.5973766119987,
"count": 97205,
"is_parallel": true,
"self": 45.5973766119987
},
"communicator.exchange": {
"total": 2811.1031216530528,
"count": 97205,
"is_parallel": true,
"self": 2811.1031216530528
},
"steps_from_proto": {
"total": 195.3946973900911,
"count": 97205,
"is_parallel": true,
"self": 42.181809910593984,
"children": {
"_process_rank_one_or_two_observation": {
"total": 153.2128874794971,
"count": 777640,
"is_parallel": true,
"self": 153.2128874794971
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 1627.4713128550766,
"count": 97206,
"self": 7.007601120218169,
"children": {
"process_trajectory": {
"total": 252.08617351287012,
"count": 97206,
"self": 251.7073947038698,
"children": {
"RLTrainer._checkpoint": {
"total": 0.37877880900032324,
"count": 3,
"self": 0.37877880900032324
}
}
},
"_update_policy": {
"total": 1368.3775382219883,
"count": 691,
"self": 555.2198198440856,
"children": {
"TorchPPOOptimizer.update": {
"total": 813.1577183779027,
"count": 34272,
"self": 813.1577183779027
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.3930002751294523e-06,
"count": 1,
"self": 1.3930002751294523e-06
},
"TrainerController._save_models": {
"total": 0.00037134600097488146,
"count": 1,
"self": 3.178200131515041e-05,
"children": {
"RLTrainer._checkpoint": {
"total": 0.00033956399965973105,
"count": 1,
"self": 0.00033956399965973105
}
}
}
}
}
}
}