poca-SoccerTwos / run_logs / timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 3.295736074447632,
"min": 3.295736074447632,
"max": 3.295736074447632,
"count": 1
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 105463.5546875,
"min": 105463.5546875,
"max": 105463.5546875,
"count": 1
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 595.4,
"min": 595.4,
"max": 595.4,
"count": 1
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 23816.0,
"min": 23816.0,
"max": 23816.0,
"count": 1
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1199.502900161272,
"min": 1199.502900161272,
"max": 1199.502900161272,
"count": 1
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 14394.034801935264,
"min": 14394.034801935264,
"max": 14394.034801935264,
"count": 1
},
"SoccerTwos.Step.mean": {
"value": 9928.0,
"min": 9928.0,
"max": 9928.0,
"count": 1
},
"SoccerTwos.Step.sum": {
"value": 9928.0,
"min": 9928.0,
"max": 9928.0,
"count": 1
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.035124365240335464,
"min": -0.035124365240335464,
"max": -0.035124365240335464,
"count": 1
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -0.6322385668754578,
"min": -0.6322385668754578,
"max": -0.6322385668754578,
"count": 1
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.03510645404458046,
"min": -0.03510645404458046,
"max": -0.03510645404458046,
"count": 1
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -0.6319161653518677,
"min": -0.6319161653518677,
"max": -0.6319161653518677,
"count": 1
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.11206666462951237,
"min": -0.11206666462951237,
"max": -0.11206666462951237,
"count": 1
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -2.0171999633312225,
"min": -2.0171999633312225,
"max": -2.0171999633312225,
"count": 1
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.11206666462951237,
"min": -0.11206666462951237,
"max": -0.11206666462951237,
"count": 1
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -2.0171999633312225,
"min": -2.0171999633312225,
"max": -2.0171999633312225,
"count": 1
},
"SoccerTwos.IsTraining.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1
},
"SoccerTwos.IsTraining.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1684774204",
"python_version": "3.9.10 (tags/v3.9.10:f2f3f53, Jan 17 2022, 15:14:21) [MSC v.1929 64 bit (AMD64)]",
"command_line_arguments": "C:\\Users\\USUARIO\\.pyenv\\pyenv-win\\versions\\3.9.10\\Scripts\\mlagents-learn ./config/poca/SoccerTwos.yaml --env=.\\\\SoccerTwos\\\\SoccerTwos.exe --run-id=SoccerTwos --no-graphics --force",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.13.0+cpu",
"numpy_version": "1.21.2",
"end_time_seconds": "1684774219"
},
"total": 15.152778699999999,
"count": 1,
"self": 0.1582077999999978,
"children": {
"run_training.setup": {
"total": 0.07621860000000003,
"count": 1,
"self": 0.07621860000000003
},
"TrainerController.start_learning": {
"total": 14.9183523,
"count": 1,
"self": 0.012909400000003401,
"children": {
"TrainerController._reset_env": {
"total": 2.6936434,
"count": 1,
"self": 2.6936434
},
"TrainerController.advance": {
"total": 12.147180199999996,
"count": 1004,
"self": 0.014211399999970453,
"children": {
"env_step": {
"total": 10.814022300000001,
"count": 1004,
"self": 8.711554699999976,
"children": {
"SubprocessEnvManager._take_step": {
"total": 2.094348800000026,
"count": 1004,
"self": 0.07127090000002223,
"children": {
"TorchPolicy.evaluate": {
"total": 2.0230779000000036,
"count": 2000,
"self": 2.0230779000000036
}
}
},
"workers": {
"total": 0.008118799999999649,
"count": 1004,
"self": 0.0,
"children": {
"worker_root": {
"total": 12.623107200000037,
"count": 1004,
"is_parallel": true,
"self": 5.335519299999998,
"children": {
"steps_from_proto": {
"total": 0.0010125000000000828,
"count": 2,
"is_parallel": true,
"self": 0.00022460000000013025,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0007878999999999525,
"count": 8,
"is_parallel": true,
"self": 0.0007878999999999525
}
}
},
"UnityEnvironment.step": {
"total": 7.286575400000039,
"count": 1004,
"is_parallel": true,
"self": 0.29369840000002867,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.19144409999999246,
"count": 1004,
"is_parallel": true,
"self": 0.19144409999999246
},
"communicator.exchange": {
"total": 5.8811339999999905,
"count": 1004,
"is_parallel": true,
"self": 5.8811339999999905
},
"steps_from_proto": {
"total": 0.9202989000000277,
"count": 2008,
"is_parallel": true,
"self": 0.1803979999999803,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.7399009000000474,
"count": 8032,
"is_parallel": true,
"self": 0.7399009000000474
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 1.3189465000000244,
"count": 1004,
"self": 0.07088530000000892,
"children": {
"process_trajectory": {
"total": 1.2480612000000155,
"count": 1004,
"self": 1.2480612000000155
}
}
}
}
},
"trainer_threads": {
"total": 5.00000002290335e-07,
"count": 1,
"self": 5.00000002290335e-07
},
"TrainerController._save_models": {
"total": 0.06461879999999809,
"count": 1,
"self": 0.001846499999999196,
"children": {
"RLTrainer._checkpoint": {
"total": 0.06277229999999889,
"count": 1,
"self": 0.06277229999999889
}
}
}
}
}
}
}
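
The log above follows the ML-Agents timers format: a "gauges" map of per-metric value/min/max/count records, a "metadata" block describing the run, and a nested timer tree of total/self/count entries. As a minimal sketch (not part of the run log, and not an official ML-Agents utility), the following Python snippet shows one way to load and summarize such a file; the path "run_logs/timers.json" is assumed from the file location shown in the title.

# Sketch: inspect an ML-Agents timers.json (field names taken from the log above).
import json

with open("run_logs/timers.json") as f:  # assumed path; adjust to your run directory
    timers = json.load(f)

# Print each gauge's recorded value and sample count,
# e.g. "SoccerTwos.Self-play.ELO.mean: value=1199.50... (count=1)".
for name, gauge in timers["gauges"].items():
    print(f"{name}: value={gauge['value']} (count={gauge['count']})")

# Recursively walk the timer tree and report where wall-clock time was spent.
def report(node, label="root", depth=0):
    print(f"{'  ' * depth}{label}: total {node.get('total', 0.0):.3f}s, "
          f"self {node.get('self', 0.0):.3f}s, count {node.get('count', 0)}")
    for child_label, child in node.get("children", {}).items():
        report(child, child_label, depth + 1)

report(timers)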