vjeansel committed
Commit d293660
1 Parent(s): ef4aff1

Module 1: First model

README.md ADDED
@@ -0,0 +1,36 @@
+ ---
+ library_name: stable-baselines3
+ tags:
+ - LunarLander-v2
+ - deep-reinforcement-learning
+ - reinforcement-learning
+ - stable-baselines3
+ model-index:
+ - name: PPO-v1
+   results:
+   - metrics:
+     - type: mean_reward
+       value: 252.71 +/- 23.21
+       name: mean_reward
+     task:
+       type: reinforcement-learning
+       name: reinforcement-learning
+     dataset:
+       name: LunarLander-v2
+       type: LunarLander-v2
+ ---
+
+ # **PPO-v1** Agent playing **LunarLander-v2**
+ This is a trained model of a **PPO-v1** agent playing **LunarLander-v2**
+ using the [stable-baselines3 library](https://github.com/DLR-RM/stable-baselines3).
+
+ ## Usage (with Stable-baselines3)
+ TODO: Add your code
+
+
+ ```python
+ from stable_baselines3 import ...
+ from huggingface_sb3 import load_from_hub
+
+ ...
+ ```
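
The `TODO: Add your code` stub above is the stock template text. Below is a minimal sketch of how it is typically filled in, assuming the checkpoint filename `ppo-LunarLander-v2.zip` committed here and a placeholder `repo_id` (the actual Hub repository id is not shown in this diff); the rollout loop uses the Gym 0.21 step API recorded in config.json:

```python
import gym
from huggingface_sb3 import load_from_hub
from stable_baselines3 import PPO

# Download the checkpoint committed in this repo; the repo_id below is a placeholder.
checkpoint = load_from_hub(
    repo_id="<user>/<repo>",  # hypothetical id, replace with the actual repository
    filename="ppo-LunarLander-v2.zip",
)
model = PPO.load(checkpoint)

# Roll out one episode (Gym 0.21 API: reset() returns obs, step() returns a 4-tuple).
env = gym.make("LunarLander-v2")
obs = env.reset()
done, episode_return = False, 0.0
while not done:
    action, _states = model.predict(obs, deterministic=True)
    obs, reward, done, info = env.step(action)
    episode_return += reward
print(f"episode return: {episode_return:.2f}")
```

`deterministic=True` mirrors the `is_deterministic: true` setting recorded in results.json.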
config.json CHANGED
@@ -1 +1 @@
1
- {"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gASVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==", "__module__": "stable_baselines3.common.policies", "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param sde_net_arch: Network architecture for extracting features\n when using gSDE. If None, the latent features from the policy will be used.\n Pass an empty list to use the states as features.\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function ActorCriticPolicy.__init__ at 0x7f2001b5d5f0>", "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7f2001b5d680>", "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7f2001b5d710>", "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7f2001b5d7a0>", "_build": "<function ActorCriticPolicy._build at 0x7f2001b5d830>", "forward": "<function ActorCriticPolicy.forward at 0x7f2001b5d8c0>", "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7f2001b5d950>", "_predict": "<function ActorCriticPolicy._predict at 0x7f2001b5d9e0>", "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7f2001b5da70>", "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7f2001b5db00>", "predict_values": "<function ActorCriticPolicy.predict_values at 0x7f2001b5db90>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc_data object at 0x7f2001baa7e0>"}, "verbose": 1, "policy_kwargs": {}, "observation_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": 
"gASVwwEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLCIWUjANsb3eUjBVudW1weS5jb3JlLm11bHRpYXJyYXmUjAxfcmVjb25zdHJ1Y3SUk5RoBowHbmRhcnJheZSTlEsAhZRDAWKUh5RSlChLAUsIhZRoColDIAAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/lHSUYowEaGlnaJRoEmgUSwCFlGgWh5RSlChLAUsIhZRoColDIAAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/lHSUYowNYm91bmRlZF9iZWxvd5RoEmgUSwCFlGgWh5RSlChLAUsIhZRoB4wCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYolDCAAAAAAAAAAAlHSUYowNYm91bmRlZF9hYm92ZZRoEmgUSwCFlGgWh5RSlChLAUsIhZRoKolDCAAAAAAAAAAAlHSUYowKX25wX3JhbmRvbZROdWIu", "dtype": "float32", "_shape": [8], "low": "[-inf -inf -inf -inf -inf -inf -inf -inf]", "high": "[inf inf inf inf inf inf inf inf]", "bounded_below": "[False False False False False False False False]", "bounded_above": "[False False False False False False False False]", "_np_random": null}, "action_space": {":type:": "<class 'gym.spaces.discrete.Discrete'>", ":serialized:": "gASVggAAAAAAAACME2d5bS5zcGFjZXMuZGlzY3JldGWUjAhEaXNjcmV0ZZSTlCmBlH2UKIwBbpRLBIwGX3NoYXBllCmMBWR0eXBllIwFbnVtcHmUaAeTlIwCaTiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowKX25wX3JhbmRvbZROdWIu", "n": 4, "_shape": [], "dtype": "int64", "_np_random": null}, "n_envs": 32, "num_timesteps": 0, "_total_timesteps": 500000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1654368074.201593, "learning_rate": 0.0003, "tensorboard_log": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gASVvwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwNX2J1aWx0aW5fdHlwZZSTlIwKTGFtYmRhVHlwZZSFlFKUKGgCjAhDb2RlVHlwZZSFlFKUKEsBSwBLAUsBSxNDBIgAUwCUToWUKYwBX5SFlIxIL3Vzci9sb2NhbC9saWIvcHl0aG9uMy43L2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lIwEZnVuY5RLgEMCAAGUjAN2YWyUhZQpdJRSlH2UKIwLX19wYWNrYWdlX1+UjBhzdGFibGVfYmFzZWxpbmVzMy5jb21tb26UjAhfX25hbWVfX5SMHnN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi51dGlsc5SMCF9fZmlsZV9flIxIL3Vzci9sb2NhbC9saWIvcHl0aG9uMy43L2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaCB9lH2UKGgXaA6MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgYjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz8zqSowVTJhhZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "_last_obs": {":type:": "<class 'numpy.ndarray'>", ":serialized:": 
"gASVjQQAAAAAAACMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMDF9yZWNvbnN0cnVjdJSTlIwFbnVtcHmUjAduZGFycmF5lJOUSwCFlEMBYpSHlFKUKEsBSyBLCIaUaAOMBWR0eXBllJOUjAJmNJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRiiUIABAAAzbPQvVKww7nYcec7J28BOZVTwDrGN6m5AACAPwAAgD/mpSW915MkuQdny7qztim1V7aVOsXI7zkAAIA/AACAP8CK5T2SqAg/7ZN3vS1vVb4/o7+7GCKDvQAAAAAAAAAApuCsvQPxVz+lqGG7sy+KvmwWNb3OCMw8AAAAAAAAAAAzerI9w/E9urk+NDvMGXO4gxyOOU662bkAAIA/AACAPzPhDr3DyTW6y2yHOqFs8zUM+MA5Ig2euQAAgD8AAIA/GvJuvdgA1T76p1Y+f1uEvhv7jj1LYnA9AAAAAAAAAABm87o+Q4kBP1ZAcb5/WGu+WTfvPToblL0AAAAAAAAAABoBfL3DEQ+6KtyCOsRwlzVeK3g5MwqWuQAAgD8AAIA/IFRjPggOJD8P9pW9j2iIvqivXz0ZsbE8AAAAAAAAAADz+mC+4fiCvLycszrjcM84bpruPWIW2bkAAIA/AACAP5ocKL32TDu6TRkDPGyGZDV3Dbs65thaNAAAgD8AAIA/AJL4vK7nm7ouKIY5q41ZNBNgBruTU5q4AACAPwAAgD9mE668UlDAuQ4AQToGHoc2MG2sOmMuY7kAAIA/AACAPzO8oTz2nAy6gWcGvPPsYjXB9u65ffjHtAAAgD8AAIA/lmCCPpxFnT96l84+jqbGvuEfSD51mxY9AAAAAAAAAAAaUbO9KUAiumaF5rpOeeQ1dpgvOsITVrUAAAAAAACAPwCqgr3/wZ0/M7Byvnauh74Ig+m779U2vQAAAAAAAAAAwImVPeFak7pC/yG8CdyEtbelNLvD8uY0AACAPwAAgD9NIF49w+VMuj2mtrtXwpk4vcTruruoebcAAIA/AACAP5pu/zyPhne68/Zfu7DHTTiJg1c7oPD6OQAAgD8AAIA/qlqlPq0PBT9C6GK+evp3vqrHCT06KPE8AAAAAAAAAAAAMCm8SHXSupJcbDy3waG8r/IAvMNxiL0AAIA/AACAPxqlDT7PSSa8aPUXPA/2nzwIaom9VsuDPQAAgD8AAIA/mgmKu3cBtT++bxq9lv24vYalbLt1sy+9AAAAAAAAAABmyUS9KbhMuvxTkjp8nuO1FLNrusCgrLkAAIA/AACAPzq5DL4K8Wk8iHPLPcw7gL7LOwQ7+uPPPQAAAAAAAAAApgHWvSzSfT+WtjW9bXhhvuP2Z709dEc8AAAAAAAAAABmA3G99gRruhUj/jneHW0wfzSiunpyE7kAAIA/AACAP5qRzDtIS5C6inb+utGXi7bUVLA62y36NQAAgD8AAIA/ZrypvSnoZLotS/+4FQ4vtEgG0DiW7hA4AACAPwAAgD8m+Ue+yp2UPyqos755Mn++SGhKvg6YYL0AAAAAAAAAAJR0lGIu"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gASVqAAAAAAAAACMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMDF9yZWNvbnN0cnVjdJSTlIwFbnVtcHmUjAduZGFycmF5lJOUSwCFlEMBYpSHlFKUKEsBSyCFlGgDjAVkdHlwZZSTlIwCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYolDIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlHSUYi4="}, "_last_original_obs": null, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": 0.08249600000000001, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gASVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gASVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 120, "n_steps": 1024, "gamma": 0.999, "gae_lambda": 0.98, "ent_coef": 0.01, "vf_coef": 0.5, "max_grad_norm": 0.5, "batch_size": 64, "n_epochs": 4, "clip_range": {":type:": "<class 'function'>", ":serialized:": "gASVvwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwNX2J1aWx0aW5fdHlwZZSTlIwKTGFtYmRhVHlwZZSFlFKUKGgCjAhDb2RlVHlwZZSFlFKUKEsBSwBLAUsBSxNDBIgAUwCUToWUKYwBX5SFlIxIL3Vzci9sb2NhbC9saWIvcHl0aG9uMy43L2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lIwEZnVuY5RLgEMCAAGUjAN2YWyUhZQpdJRSlH2UKIwLX19wYWNrYWdlX1+UjBhzdGFibGVfYmFzZWxpbmVzMy5jb21tb26UjAhfX25hbWVfX5SMHnN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi51dGlsc5SMCF9fZmlsZV9flIxIL3Vzci9sb2NhbC9saWIvcHl0aG9uMy43L2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaCB9lH2UKGgXaA6MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgYjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz/JmZmZmZmahZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "clip_range_vf": null, "normalize_advantage": true, 
"target_kl": null, "system_info": {"OS": "Linux-5.4.188+-x86_64-with-Ubuntu-18.04-bionic #1 SMP Sun Apr 24 10:03:06 PDT 2022", "Python": "3.7.13", "Stable-Baselines3": "1.5.0", "PyTorch": "1.11.0+cu113", "GPU Enabled": "True", "Numpy": "1.21.6", "Gym": "0.21.0"}}
 
1
+ {"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gASVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==", "__module__": "stable_baselines3.common.policies", "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param sde_net_arch: Network architecture for extracting features\n when using gSDE. If None, the latent features from the policy will be used.\n Pass an empty list to use the states as features.\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function ActorCriticPolicy.__init__ at 0x7fa79df12320>", "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7fa79df123b0>", "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7fa79df12440>", "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7fa79df124d0>", "_build": "<function ActorCriticPolicy._build at 0x7fa79df12560>", "forward": "<function ActorCriticPolicy.forward at 0x7fa79df125f0>", "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7fa79df12680>", "_predict": "<function ActorCriticPolicy._predict at 0x7fa79df12710>", "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7fa79df127a0>", "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7fa79df12830>", "predict_values": "<function ActorCriticPolicy.predict_values at 0x7fa79df128c0>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc_data object at 0x7fa79df67600>"}, "verbose": 1, "policy_kwargs": {}, "observation_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": 
"gASVwwEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLCIWUjANsb3eUjBVudW1weS5jb3JlLm11bHRpYXJyYXmUjAxfcmVjb25zdHJ1Y3SUk5RoBowHbmRhcnJheZSTlEsAhZRDAWKUh5RSlChLAUsIhZRoColDIAAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/lHSUYowEaGlnaJRoEmgUSwCFlGgWh5RSlChLAUsIhZRoColDIAAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/lHSUYowNYm91bmRlZF9iZWxvd5RoEmgUSwCFlGgWh5RSlChLAUsIhZRoB4wCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYolDCAAAAAAAAAAAlHSUYowNYm91bmRlZF9hYm92ZZRoEmgUSwCFlGgWh5RSlChLAUsIhZRoKolDCAAAAAAAAAAAlHSUYowKX25wX3JhbmRvbZROdWIu", "dtype": "float32", "_shape": [8], "low": "[-inf -inf -inf -inf -inf -inf -inf -inf]", "high": "[inf inf inf inf inf inf inf inf]", "bounded_below": "[False False False False False False False False]", "bounded_above": "[False False False False False False False False]", "_np_random": null}, "action_space": {":type:": "<class 'gym.spaces.discrete.Discrete'>", ":serialized:": "gASVggAAAAAAAACME2d5bS5zcGFjZXMuZGlzY3JldGWUjAhEaXNjcmV0ZZSTlCmBlH2UKIwBbpRLBIwGX3NoYXBllCmMBWR0eXBllIwFbnVtcHmUaAeTlIwCaTiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowKX25wX3JhbmRvbZROdWIu", "n": 4, "_shape": [], "dtype": "int64", "_np_random": null}, "n_envs": 32, "num_timesteps": 1015808, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1654368528.224836, "learning_rate": 0.0003, "tensorboard_log": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gASVvwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwNX2J1aWx0aW5fdHlwZZSTlIwKTGFtYmRhVHlwZZSFlFKUKGgCjAhDb2RlVHlwZZSFlFKUKEsBSwBLAUsBSxNDBIgAUwCUToWUKYwBX5SFlIxIL3Vzci9sb2NhbC9saWIvcHl0aG9uMy43L2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lIwEZnVuY5RLgEMCAAGUjAN2YWyUhZQpdJRSlH2UKIwLX19wYWNrYWdlX1+UjBhzdGFibGVfYmFzZWxpbmVzMy5jb21tb26UjAhfX25hbWVfX5SMHnN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi51dGlsc5SMCF9fZmlsZV9flIxIL3Vzci9sb2NhbC9saWIvcHl0aG9uMy43L2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaCB9lH2UKGgXaA6MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgYjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz8zqSowVTJhhZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "_last_obs": {":type:": "<class 'numpy.ndarray'>", ":serialized:": 
"gASVjQQAAAAAAACMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMDF9yZWNvbnN0cnVjdJSTlIwFbnVtcHmUjAduZGFycmF5lJOUSwCFlEMBYpSHlFKUKEsBSyBLCIaUaAOMBWR0eXBllJOUjAJmNJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRiiUIABAAAAN6BvGHc2j2B9Y49IIAuvj6y4zxSsLE9AAAAAAAAAAAz86I5XCsIut76H7k77xS2Zw7aOvbvjTUAAIA/AACAPzM27DxI7Ye6Fjmzu/u9YzjFwwM7jrO2NwAAgD8AAIA/pT6avqTRZrtcF786ghuiN5OxrDxGJN65AACAPwAAgD9AOis+HL2UPkBTXL6XLGy+q8DCvHZd+rwAAAAAAAAAAMUcsb68x9m9cng/ODOzVzdb4/w+/AGdtwAAgD8AAIA/wG4Avns8r7rcVKG7miQNuY3FNTsK4/E5AACAPwAAgD+aUcc8FtJbPydHxL3Mzr6+2dF8vCZphb0AAAAAAAAAAHMCpb66TQA/OiKAPdMtw77XTxK9ssIiPQAAAAAAAAAAaotvvqOXMj02Nr26yoEuObSCzb6oZ0W4AACAPwAAgD/mWV4+IbuHPw9uoT4kN4a+aWxIPkctIj0AAAAAAAAAAJoVJD0/Hq8/avijPmwpjL58P9Y8Hk7UPQAAAAAAAAAAgOacvcP9JbpSQ166Lac+NfuUILt1A4E5AACAPwAAgD/N2su8j/pXutWp6TrVj7w0yw7uuvFyBroAAIA/AACAP+ATdb6rWIU/sjCfvuEgtr5hCEC+IJQCuwAAAAAAAAAAWvZ6vgMhI7zrzV+8X2XfuV4uhz1LNLk6AACAPwAAgD+mdHm+gMFhP57Rqb6XCqe+Shg/vttrjr0AAAAAAAAAAJpZgrmP3kC6HsGOumwHD7ZwxMO4ZHCENQAAgD8AAIA/cwC2vY+GWboMwI66pXccthF1QjuAN6Q5AACAPwAAgD9wE0++J8WfP9D//74ce/a+jco3vvXGbb0AAAAAAAAAAJp4Q71SIIe5me3Iu5E7kjhtb687gMBuOgAAgD8AAIA/zQwBu4cUvD/1jq68drOnvTHtarxgA+48AAAAAAAAAABmnFE8KQAyumZoXTo4ZwQ2VaZZOyYHgLkAAIA/AACAPxYAbb7cXUa8eCr3t+d93LWRza89fksUNwAAgD8AAIA/AKB4uvY0aLruczM4MXWxM82ozDruVk23AACAPwAAgD9mFpM8j4Jjuj27+7tjyi43ZgsVu1p0mLYAAIA/AACAPzgxoL4wey4/jiYOvhzj1b6oG5K+UifdPQAAAAAAAAAAMytqvCksErqKaxe6BlHvtRft+boMfiw5AACAPwAAgD9mhMk9SOWGutoqfDtaF2U4rXEROzLlY7kAAIA/AACAP01+Vb1cZ1a6tvljuQDQTDK+rQa7ageEOAAAgD8AAIA/ZkmjvHuWl7oew6U6gHyVNIi6Gboscr25AACAPwAAgD8znXu8w6Vxun6cHjsra4q2OXUgu/q6NboAAIA/AACAP5R0lGIu"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gASVqAAAAAAAAACMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMDF9yZWNvbnN0cnVjdJSTlIwFbnVtcHmUjAduZGFycmF5lJOUSwCFlEMBYpSHlFKUKEsBSyCFlGgDjAVkdHlwZZSTlIwCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYolDIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlHSUYi4="}, "_last_original_obs": null, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": -0.015808000000000044, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": 
"gASVchAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIS3ZsBOIjYUCUhpRSlIwBbJRN6AOMAXSUR0CNe1igkC3gdX2UKGgGaAloD0MIUU1J1uHIBECUhpRSlGgVS/RoFkdAjYRXq7iAD3V9lChoBmgJaA9DCEInhA46dGBAlIaUUpRoFU3oA2gWR0CNiP51vES/dX2UKGgGaAloD0MINQcI5mjMYkCUhpRSlGgVTegDaBZHQI2Qtgc94eN1fZQoaAZoCWgPQwiXGwx1WElVQJSGlFKUaBVN6ANoFkdAjZGuM2m52HV9lChoBmgJaA9DCK5JtyVyfWFAlIaUUpRoFU3oA2gWR0CNkmf29L6DdX2UKGgGaAloD0MIPiR872/dYECUhpRSlGgVTegDaBZHQI2Y7B2wFC91fZQoaAZoCWgPQwiAKJgxBRsTwJSGlFKUaBVL3WgWR0CNmgI8hcJMdX2UKGgGaAloD0MI8PlhhHBAYkCUhpRSlGgVTegDaBZHQI2gqhJyyUt1fZQoaAZoCWgPQwh+calKWx5cQJSGlFKUaBVN6ANoFkdAjaPejua4MHV9lChoBmgJaA9DCM5SspyEVGFAlIaUUpRoFU3oA2gWR0CNswU/wAlwdX2UKGgGaAloD0MIcR5OYDoOXkCUhpRSlGgVTegDaBZHQI25Aw22oeh1fZQoaAZoCWgPQwgaMbPP43xhQJSGlFKUaBVN6ANoFkdAjcbARsdkrnV9lChoBmgJaA9DCOS/QBCgPmJAlIaUUpRoFU3oA2gWR0CNyW0cfeUIdX2UKGgGaAloD0MIXI3sSssUQkCUhpRSlGgVS99oFkdAjc2LylN1yXV9lChoBmgJaA9DCEYJ+gs9+FtAlIaUUpRoFU3oA2gWR0CN0h93r2QGdX2UKGgGaAloD0MIMnGrIAYIXECUhpRSlGgVTegDaBZHQI3W+yE+Pil1fZQoaAZoCWgPQwheonpr4F1gQJSGlFKUaBVN6ANoFkdAjdtqNhmXgXV9lChoBmgJaA9DCApoImz4zmBAlIaUUpRoFU3oA2gWR0CN5JyJbdJrdX2UKGgGaAloD0MIVRSvsrbHX0CUhpRSlGgVTegDaBZHQI3nRpg1FYx1fZQoaAZoCWgPQwguBDkoYehiQJSGlFKUaBVN6ANoFkdAjgG6naWX1XV9lChoBmgJaA9DCBjpRe1+5mRAlIaUUpRoFU3oA2gWR0COA0GxD9fkdX2UKGgGaAloD0MIx6F+F7YyNcCUhpRSlGgVS9JoFkdAjgXJEH+qBHV9lChoBmgJaA9DCPyPTIdOjwxAlIaUUpRoFUv3aBZHQI4GCA6Mir11fZQoaAZoCWgPQwibkUHuInhdQJSGlFKUaBVN6ANoFkdAjgZVIiC8OHV9lChoBmgJaA9DCN2271F/XWFAlIaUUpRoFU3oA2gWR0COUqkt29tedX2UKGgGaAloD0MIjdXm/9VXZECUhpRSlGgVTegDaBZHQI5gi6MBIWh1fZQoaAZoCWgPQwg3qP3WzrpkQJSGlFKUaBVN6ANoFkdAjmZ6pHZsbnV9lChoBmgJaA9DCE637BD/qWFAlIaUUpRoFU3oA2gWR0COZ9Kf4AS4dX2UKGgGaAloD0MI46lHGlyVYECUhpRSlGgVTegDaBZHQI5zxPhybQV1fZQoaAZoCWgPQwhDWI0lLLRjQJSGlFKUaBVN6ANoFkdAjnTclPacqnV9lChoBmgJaA9DCCzzVl2HijZAlIaUUpRoFUvoaBZHQI6haMUAT7F1fZQoaAZoCWgPQwihvfp4aF9jQJSGlFKUaBVN6ANoFkdAjqJG7SRbKXV9lChoBmgJaA9DCEeSIFyBOGFAlIaUUpRoFU3oA2gWR0COpddO6/ZedX2UKGgGaAloD0MIWKzhIndyaECUhpRSlGgVTegDaBZHQI6sH1rZamp1fZQoaAZoCWgPQwjUfJV8bBRgQJSGlFKUaBVN6ANoFkdAjq5wazeGf3V9lChoBmgJaA9DCIBFfv2QaGNAlIaUUpRoFU3oA2gWR0COt6pON5t4dX2UKGgGaAloD0MIMJ+sGC6OYkCUhpRSlGgVTegDaBZHQI7A8IcBEKF1fZQoaAZoCWgPQwi4yagyjLdmQJSGlFKUaBVN6ANoFkdAjsV1v2oNu3V9lChoBmgJaA9DCGWryykBjl5AlIaUUpRoFU3oA2gWR0COzJMYdhiLdX2UKGgGaAloD0MIsdtnlRm1YkCUhpRSlGgVTegDaBZHQI7NdOEdvKl1fZQoaAZoCWgPQwjXpNsSufNcQJSGlFKUaBVN6ANoFkdAjs4a9K28ZnV9lChoBmgJaA9DCEc5mE2AVWBAlIaUUpRoFU3oA2gWR0CO0+qbSZ0CdX2UKGgGaAloD0MIWK63zVTPW0CUhpRSlGgVTegDaBZHQI7a9/QSi/R1fZQoaAZoCWgPQwhjQzf7gw5gQJSGlFKUaBVN6ANoFkdAjt3H2RJVbXV9lChoBmgJaA9DCPusMlNafy7AlIaUUpRoFUviaBZHQI7iAeT3Zf51fZQoaAZoCWgPQwi+E7NeDANfQJSGlFKUaBVN6ANoFkdAjuukBCD28XV9lChoBmgJaA9DCHNKQEzCQGBAlIaUUpRoFU3oA2gWR0CO8PVbzK9xdX2UKGgGaAloD0MIL0/nilJAS0CUhpRSlGgVS+hoFkdAjvZQ0O3DvXV9lChoBmgJaA9DCO9TVWggLmJAlIaUUpRoFU3oA2gWR0CO/V22Xsw+dX2UKGgGaAloD0MIKChFK/daYUCUhpRSlGgVTegDaBZHQI8DTadtl7N1fZQoaAZoCWgPQwjww0FClG8hwJSGlFKUaBVNBwFoFkdAjwRZMURFqnV9lChoBmgJaA9DCL75DRMN3VdAlIaUUpRoFU3oA2gWR0CPC4YCQtBfdX2UKGgGaAloD0MIRSkhWFVtV0CUhpRSlGgVTegDaBZHQI8PYUzsQd11fZQoaAZoCWgPQwgJwD+lSiNjQJSGlFKUaBVN6ANoFkdAjxdK77Kq43V9lChoBmgJaA9DCLxZg/fVX2JAlIaUUpRoFU3oA2gWR0CPGhLlmvnsdX2UKGgGaAloD0MI0T3rGi2HKcCUhpRSlGgVS+VoFkdAjy2uZ1FH8XV9lChoBmgJaA9DCHEEqRQ7+GZAlIaUUpRoFU3oA2gWR0CPOR4UN8VpdX2UKGgGaAloD0MI9fOmIpWcYkCUhpRSlGgVTegDaBZHQI86mDHwPRR1fZQoaAZoCWgPQwi4c2GkF0BhQJSGlFKUaBVN6ANoFkdAjzzMcABDHHV9lChoBmgJaA9DCFuwVBfwlGRAlIaUUpRoFU3oA2gWR0CPPQhAWznidX2UKGgGaAloD0MIZOWXwRjeW0CUhpRSlGgVTegDaBZHQI89V/QSi/
R1fZQoaAZoCWgPQwhBf6FHjN4iQJSGlFKUaBVLxWgWR0CPPlKxLTQWdX2UKGgGaAloD0MIQDBHj18fZ0CUhpRSlGgVTegDaBZHQI9ANNet0V91fZQoaAZoCWgPQwjkZyPXTX0xQJSGlFKUaBVL1GgWR0CPjwXb/Ot5dX2UKGgGaAloD0MIo7CLoocEYECUhpRSlGgVTegDaBZHQI+WHBciW3V1fZQoaAZoCWgPQwgiiV5GsVVUQJSGlFKUaBVN6ANoFkdAj5twE6kqMHV9lChoBmgJaA9DCN/cXz1uj2VAlIaUUpRoFU3oA2gWR0CPp1nMdLg5dX2UKGgGaAloD0MInOCbps9eX0CUhpRSlGgVTegDaBZHQI+oR99c8kl1fZQoaAZoCWgPQwglrfiGwmf3v5SGlFKUaBVLs2gWR0CPqXmK64DtdX2UKGgGaAloD0MIcCNli6S9G8CUhpRSlGgVS+NoFkdAj7XemelKsnV9lChoBmgJaA9DCEOOrWcIlydAlIaUUpRoFUvMaBZHQI/GJ4W1twd1fZQoaAZoCWgPQwg7w9SWOh1fQJSGlFKUaBVN6ANoFkdAj84A7YChe3V9lChoBmgJaA9DCACo4sYtn15AlIaUUpRoFU3oA2gWR0CPztBJqZc+dX2UKGgGaAloD0MIdQZGXtblZkCUhpRSlGgVTegDaBZHQI/RldLQHA11fZQoaAZoCWgPQwhmh/iHLUtmQJSGlFKUaBVN6ANoFkdAj9aeg13t8nV9lChoBmgJaA9DCBGsqpffM1xAlIaUUpRoFU3oA2gWR0CP4I7p3X7MdX2UKGgGaAloD0MISvCGNKoxY0CUhpRSlGgVTegDaBZHQI/opdfLLZB1fZQoaAZoCWgPQwg7ONibmLxiQJSGlFKUaBVN6ANoFkdAj/OgtnPE9HV9lChoBmgJaA9DCEOtad7xdmVAlIaUUpRoFU3oA2gWR0CP9Sy57PY4dX2UKGgGaAloD0MIcQFolK6mZUCUhpRSlGgVTegDaBZHQI/8OUSqU/x1fZQoaAZoCWgPQwi214LemyphQJSGlFKUaBVN6ANoFkdAkAHC3PRiPXV9lChoBmgJaA9DCGiVmdL6pWNAlIaUUpRoFU3oA2gWR0CQA0ukk8ifdX2UKGgGaAloD0MIc/G3PcELYUCUhpRSlGgVTegDaBZHQJAFeDVYp2F1fZQoaAZoCWgPQwjxZDcz+nNiQJSGlFKUaBVN6ANoFkdAkApMg2ZRbnV9lChoBmgJaA9DCEN0CBwJ51xAlIaUUpRoFU3oA2gWR0CQDSdO6/ZedX2UKGgGaAloD0MI2q1lMhyIYUCUhpRSlGgVTegDaBZHQJATo+B6KLt1fZQoaAZoCWgPQwiTpkHRPBxjQJSGlFKUaBVN6ANoFkdAkBbPJV81GnV9lChoBmgJaA9DCHnKarqekGJAlIaUUpRoFU3oA2gWR0CQF2gnc+JQdX2UKGgGaAloD0MIur2kMVqKWkCUhpRSlGgVTegDaBZHQJAhylhw2l51fZQoaAZoCWgPQwihR4ye25JmQJSGlFKUaBVN6ANoFkdAkCMSK3uuzXV9lChoBmgJaA9DCL68APtoS2ZAlIaUUpRoFU3oA2gWR0CQKnE/SpirdX2UKGgGaAloD0MI1/hM9s+vM0CUhpRSlGgVS9poFkdAkC01IiC8OHV9lChoBmgJaA9DCE30+SgjS19AlIaUUpRoFU3oA2gWR0CQMBkRSP2gdX2UKGgGaAloD0MIud42U6FKZkCUhpRSlGgVTegDaBZHQJAyDZrYXft1fZQoaAZoCWgPQwiKkLqd/epkQJSGlFKUaBVN6ANoFkdAkDIv7aZhKHV9lChoBmgJaA9DCBwkRPkCWWFAlIaUUpRoFU3oA2gWR0CQMuS39aUzdX2UKGgGaAloD0MIj95wH7m4XkCUhpRSlGgVTegDaBZHQJAz6wcHWz51fZQoaAZoCWgPQwgeGED40JZoQJSGlFKUaBVNeQFoFkdAkDX7/bTMJXV9lChoBmgJaA9DCAmH3uLhBFlAlIaUUpRoFU3oA2gWR0CQNqOwgTysdWUu"}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gASVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 124, "n_steps": 1024, "gamma": 0.999, "gae_lambda": 0.98, "ent_coef": 0.01, "vf_coef": 0.5, "max_grad_norm": 0.5, "batch_size": 64, "n_epochs": 4, "clip_range": {":type:": "<class 'function'>", ":serialized:": "gASVvwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwNX2J1aWx0aW5fdHlwZZSTlIwKTGFtYmRhVHlwZZSFlFKUKGgCjAhDb2RlVHlwZZSFlFKUKEsBSwBLAUsBSxNDBIgAUwCUToWUKYwBX5SFlIxIL3Vzci9sb2NhbC9saWIvcHl0aG9uMy43L2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lIwEZnVuY5RLgEMCAAGUjAN2YWyUhZQpdJRSlH2UKIwLX19wYWNrYWdlX1+UjBhzdGFibGVfYmFzZWxpbmVzMy5jb21tb26UjAhfX25hbWVfX5SMHnN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi51dGlsc5SMCF9fZmlsZV9flIxIL3Vzci9sb2NhbC9saWIvcHl0aG9uMy43L2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaCB9lH2UKGgXaA6MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgYjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz/JmZmZmZmahZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "clip_range_vf": null, "normalize_advantage": true, "target_kl": null, "system_info": {"OS": "Linux-5.4.188+-x86_64-with-Ubuntu-18.04-bionic #1 SMP Sun 
Apr 24 10:03:06 PDT 2022", "Python": "3.7.13", "Stable-Baselines3": "1.5.0", "PyTorch": "1.11.0+cu113", "GPU Enabled": "True", "Numpy": "1.21.6", "Gym": "0.21.0"}}
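
For readers of the serialized config above: it records the run's hyperparameters (32 environments, n_steps=1024, batch_size=64, n_epochs=4, gamma=0.999, gae_lambda=0.98, ent_coef=0.01, learning_rate=0.0003, and a 1,000,000-step budget, overshot to 1,015,808 collected steps). A training sketch consistent with those values is shown below; it is a reconstruction from the config, not the author's script, and the `"MlpPolicy"` string is an assumption (the config only names ActorCriticPolicy):

```python
from stable_baselines3 import PPO
from stable_baselines3.common.env_util import make_vec_env

# 32 parallel environments, matching "n_envs": 32 in the config.
env = make_vec_env("LunarLander-v2", n_envs=32)

model = PPO(
    policy="MlpPolicy",  # assumption: the serialized ActorCriticPolicy via its MlpPolicy alias
    env=env,
    n_steps=1024,
    batch_size=64,
    n_epochs=4,
    gamma=0.999,
    gae_lambda=0.98,
    ent_coef=0.01,
    learning_rate=0.0003,
    verbose=1,
)

# "_total_timesteps": 1000000, while "num_timesteps" ends at 1015808 because PPO only
# stops after whole rollouts of n_envs * n_steps = 32 * 1024 = 32768 steps (31 rollouts).
model.learn(total_timesteps=1_000_000)
model.save("ppo-LunarLander-v2")
```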
ppo-LunarLander-v2.zip CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:435171e81115796a76d7da817d0329cc03db70de0f2181e1a44b3390243273a8
- size 139260
+ oid sha256:eed77f5961d29bf010b3526a2de5466ad74596d2d7056bf1bef1644d847a1eb2
+ size 144837
ppo-LunarLander-v2/data CHANGED
@@ -4,19 +4,19 @@
4
  ":serialized:": "gASVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==",
5
  "__module__": "stable_baselines3.common.policies",
6
  "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param sde_net_arch: Network architecture for extracting features\n when using gSDE. If None, the latent features from the policy will be used.\n Pass an empty list to use the states as features.\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
7
- "__init__": "<function ActorCriticPolicy.__init__ at 0x7f2001b5d5f0>",
8
- "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7f2001b5d680>",
9
- "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7f2001b5d710>",
10
- "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7f2001b5d7a0>",
11
- "_build": "<function ActorCriticPolicy._build at 0x7f2001b5d830>",
12
- "forward": "<function ActorCriticPolicy.forward at 0x7f2001b5d8c0>",
13
- "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7f2001b5d950>",
14
- "_predict": "<function ActorCriticPolicy._predict at 0x7f2001b5d9e0>",
15
- "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7f2001b5da70>",
16
- "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7f2001b5db00>",
17
- "predict_values": "<function ActorCriticPolicy.predict_values at 0x7f2001b5db90>",
18
  "__abstractmethods__": "frozenset()",
19
- "_abc_impl": "<_abc_data object at 0x7f2001baa7e0>"
20
  },
21
  "verbose": 1,
22
  "policy_kwargs": {},
@@ -42,12 +42,12 @@
42
  "_np_random": null
43
  },
44
  "n_envs": 32,
45
- "num_timesteps": 0,
46
- "_total_timesteps": 500000,
47
  "_num_timesteps_at_start": 0,
48
  "seed": null,
49
  "action_noise": null,
50
- "start_time": 1654368074.201593,
51
  "learning_rate": 0.0003,
52
  "tensorboard_log": null,
53
  "lr_schedule": {
@@ -56,7 +56,7 @@
56
  },
57
  "_last_obs": {
58
  ":type:": "<class 'numpy.ndarray'>",
59
- ":serialized:": "gASVjQQAAAAAAACMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMDF9yZWNvbnN0cnVjdJSTlIwFbnVtcHmUjAduZGFycmF5lJOUSwCFlEMBYpSHlFKUKEsBSyBLCIaUaAOMBWR0eXBllJOUjAJmNJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRiiUIABAAAzbPQvVKww7nYcec7J28BOZVTwDrGN6m5AACAPwAAgD/mpSW915MkuQdny7qztim1V7aVOsXI7zkAAIA/AACAP8CK5T2SqAg/7ZN3vS1vVb4/o7+7GCKDvQAAAAAAAAAApuCsvQPxVz+lqGG7sy+KvmwWNb3OCMw8AAAAAAAAAAAzerI9w/E9urk+NDvMGXO4gxyOOU662bkAAIA/AACAPzPhDr3DyTW6y2yHOqFs8zUM+MA5Ig2euQAAgD8AAIA/GvJuvdgA1T76p1Y+f1uEvhv7jj1LYnA9AAAAAAAAAABm87o+Q4kBP1ZAcb5/WGu+WTfvPToblL0AAAAAAAAAABoBfL3DEQ+6KtyCOsRwlzVeK3g5MwqWuQAAgD8AAIA/IFRjPggOJD8P9pW9j2iIvqivXz0ZsbE8AAAAAAAAAADz+mC+4fiCvLycszrjcM84bpruPWIW2bkAAIA/AACAP5ocKL32TDu6TRkDPGyGZDV3Dbs65thaNAAAgD8AAIA/AJL4vK7nm7ouKIY5q41ZNBNgBruTU5q4AACAPwAAgD9mE668UlDAuQ4AQToGHoc2MG2sOmMuY7kAAIA/AACAPzO8oTz2nAy6gWcGvPPsYjXB9u65ffjHtAAAgD8AAIA/lmCCPpxFnT96l84+jqbGvuEfSD51mxY9AAAAAAAAAAAaUbO9KUAiumaF5rpOeeQ1dpgvOsITVrUAAAAAAACAPwCqgr3/wZ0/M7Byvnauh74Ig+m779U2vQAAAAAAAAAAwImVPeFak7pC/yG8CdyEtbelNLvD8uY0AACAPwAAgD9NIF49w+VMuj2mtrtXwpk4vcTruruoebcAAIA/AACAP5pu/zyPhne68/Zfu7DHTTiJg1c7oPD6OQAAgD8AAIA/qlqlPq0PBT9C6GK+evp3vqrHCT06KPE8AAAAAAAAAAAAMCm8SHXSupJcbDy3waG8r/IAvMNxiL0AAIA/AACAPxqlDT7PSSa8aPUXPA/2nzwIaom9VsuDPQAAgD8AAIA/mgmKu3cBtT++bxq9lv24vYalbLt1sy+9AAAAAAAAAABmyUS9KbhMuvxTkjp8nuO1FLNrusCgrLkAAIA/AACAPzq5DL4K8Wk8iHPLPcw7gL7LOwQ7+uPPPQAAAAAAAAAApgHWvSzSfT+WtjW9bXhhvuP2Z709dEc8AAAAAAAAAABmA3G99gRruhUj/jneHW0wfzSiunpyE7kAAIA/AACAP5qRzDtIS5C6inb+utGXi7bUVLA62y36NQAAgD8AAIA/ZrypvSnoZLotS/+4FQ4vtEgG0DiW7hA4AACAPwAAgD8m+Ue+yp2UPyqos755Mn++SGhKvg6YYL0AAAAAAAAAAJR0lGIu"
60
  },
61
  "_last_episode_starts": {
62
  ":type:": "<class 'numpy.ndarray'>",
@@ -66,16 +66,16 @@
66
  "_episode_num": 0,
67
  "use_sde": false,
68
  "sde_sample_freq": -1,
69
- "_current_progress_remaining": 0.08249600000000001,
70
  "ep_info_buffer": {
71
  ":type:": "<class 'collections.deque'>",
72
- ":serialized:": "gASVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
73
  },
74
  "ep_success_buffer": {
75
  ":type:": "<class 'collections.deque'>",
76
  ":serialized:": "gASVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
77
  },
78
- "_n_updates": 120,
79
  "n_steps": 1024,
80
  "gamma": 0.999,
81
  "gae_lambda": 0.98,
 
4
  ":serialized:": "gASVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==",
5
  "__module__": "stable_baselines3.common.policies",
6
  "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param sde_net_arch: Network architecture for extracting features\n when using gSDE. If None, the latent features from the policy will be used.\n Pass an empty list to use the states as features.\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
7
+ "__init__": "<function ActorCriticPolicy.__init__ at 0x7fa79df12320>",
8
+ "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7fa79df123b0>",
9
+ "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7fa79df12440>",
10
+ "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7fa79df124d0>",
11
+ "_build": "<function ActorCriticPolicy._build at 0x7fa79df12560>",
12
+ "forward": "<function ActorCriticPolicy.forward at 0x7fa79df125f0>",
13
+ "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7fa79df12680>",
14
+ "_predict": "<function ActorCriticPolicy._predict at 0x7fa79df12710>",
15
+ "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7fa79df127a0>",
16
+ "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7fa79df12830>",
17
+ "predict_values": "<function ActorCriticPolicy.predict_values at 0x7fa79df128c0>",
18
  "__abstractmethods__": "frozenset()",
19
+ "_abc_impl": "<_abc_data object at 0x7fa79df67600>"
20
  },
21
  "verbose": 1,
22
  "policy_kwargs": {},
 
42
  "_np_random": null
43
  },
44
  "n_envs": 32,
45
+ "num_timesteps": 1015808,
46
+ "_total_timesteps": 1000000,
47
  "_num_timesteps_at_start": 0,
48
  "seed": null,
49
  "action_noise": null,
50
+ "start_time": 1654368528.224836,
51
  "learning_rate": 0.0003,
52
  "tensorboard_log": null,
53
  "lr_schedule": {
 
56
  },
57
  "_last_obs": {
58
  ":type:": "<class 'numpy.ndarray'>",
59
+ ":serialized:": "gASVjQQAAAAAAACMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMDF9yZWNvbnN0cnVjdJSTlIwFbnVtcHmUjAduZGFycmF5lJOUSwCFlEMBYpSHlFKUKEsBSyBLCIaUaAOMBWR0eXBllJOUjAJmNJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRiiUIABAAAAN6BvGHc2j2B9Y49IIAuvj6y4zxSsLE9AAAAAAAAAAAz86I5XCsIut76H7k77xS2Zw7aOvbvjTUAAIA/AACAPzM27DxI7Ye6Fjmzu/u9YzjFwwM7jrO2NwAAgD8AAIA/pT6avqTRZrtcF786ghuiN5OxrDxGJN65AACAPwAAgD9AOis+HL2UPkBTXL6XLGy+q8DCvHZd+rwAAAAAAAAAAMUcsb68x9m9cng/ODOzVzdb4/w+/AGdtwAAgD8AAIA/wG4Avns8r7rcVKG7miQNuY3FNTsK4/E5AACAPwAAgD+aUcc8FtJbPydHxL3Mzr6+2dF8vCZphb0AAAAAAAAAAHMCpb66TQA/OiKAPdMtw77XTxK9ssIiPQAAAAAAAAAAaotvvqOXMj02Nr26yoEuObSCzb6oZ0W4AACAPwAAgD/mWV4+IbuHPw9uoT4kN4a+aWxIPkctIj0AAAAAAAAAAJoVJD0/Hq8/avijPmwpjL58P9Y8Hk7UPQAAAAAAAAAAgOacvcP9JbpSQ166Lac+NfuUILt1A4E5AACAPwAAgD/N2su8j/pXutWp6TrVj7w0yw7uuvFyBroAAIA/AACAP+ATdb6rWIU/sjCfvuEgtr5hCEC+IJQCuwAAAAAAAAAAWvZ6vgMhI7zrzV+8X2XfuV4uhz1LNLk6AACAPwAAgD+mdHm+gMFhP57Rqb6XCqe+Shg/vttrjr0AAAAAAAAAAJpZgrmP3kC6HsGOumwHD7ZwxMO4ZHCENQAAgD8AAIA/cwC2vY+GWboMwI66pXccthF1QjuAN6Q5AACAPwAAgD9wE0++J8WfP9D//74ce/a+jco3vvXGbb0AAAAAAAAAAJp4Q71SIIe5me3Iu5E7kjhtb687gMBuOgAAgD8AAIA/zQwBu4cUvD/1jq68drOnvTHtarxgA+48AAAAAAAAAABmnFE8KQAyumZoXTo4ZwQ2VaZZOyYHgLkAAIA/AACAPxYAbb7cXUa8eCr3t+d93LWRza89fksUNwAAgD8AAIA/AKB4uvY0aLruczM4MXWxM82ozDruVk23AACAPwAAgD9mFpM8j4Jjuj27+7tjyi43ZgsVu1p0mLYAAIA/AACAPzgxoL4wey4/jiYOvhzj1b6oG5K+UifdPQAAAAAAAAAAMytqvCksErqKaxe6BlHvtRft+boMfiw5AACAPwAAgD9mhMk9SOWGutoqfDtaF2U4rXEROzLlY7kAAIA/AACAP01+Vb1cZ1a6tvljuQDQTDK+rQa7ageEOAAAgD8AAIA/ZkmjvHuWl7oew6U6gHyVNIi6Gboscr25AACAPwAAgD8znXu8w6Vxun6cHjsra4q2OXUgu/q6NboAAIA/AACAP5R0lGIu"
60
  },
61
  "_last_episode_starts": {
62
  ":type:": "<class 'numpy.ndarray'>",
 
66
  "_episode_num": 0,
67
  "use_sde": false,
68
  "sde_sample_freq": -1,
69
+ "_current_progress_remaining": -0.015808000000000044,
70
  "ep_info_buffer": {
71
  ":type:": "<class 'collections.deque'>",
72
+ ":serialized:": "gASVchAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIS3ZsBOIjYUCUhpRSlIwBbJRN6AOMAXSUR0CNe1igkC3gdX2UKGgGaAloD0MIUU1J1uHIBECUhpRSlGgVS/RoFkdAjYRXq7iAD3V9lChoBmgJaA9DCEInhA46dGBAlIaUUpRoFU3oA2gWR0CNiP51vES/dX2UKGgGaAloD0MINQcI5mjMYkCUhpRSlGgVTegDaBZHQI2Qtgc94eN1fZQoaAZoCWgPQwiXGwx1WElVQJSGlFKUaBVN6ANoFkdAjZGuM2m52HV9lChoBmgJaA9DCK5JtyVyfWFAlIaUUpRoFU3oA2gWR0CNkmf29L6DdX2UKGgGaAloD0MIPiR872/dYECUhpRSlGgVTegDaBZHQI2Y7B2wFC91fZQoaAZoCWgPQwiAKJgxBRsTwJSGlFKUaBVL3WgWR0CNmgI8hcJMdX2UKGgGaAloD0MI8PlhhHBAYkCUhpRSlGgVTegDaBZHQI2gqhJyyUt1fZQoaAZoCWgPQwh+calKWx5cQJSGlFKUaBVN6ANoFkdAjaPejua4MHV9lChoBmgJaA9DCM5SspyEVGFAlIaUUpRoFU3oA2gWR0CNswU/wAlwdX2UKGgGaAloD0MIcR5OYDoOXkCUhpRSlGgVTegDaBZHQI25Aw22oeh1fZQoaAZoCWgPQwgaMbPP43xhQJSGlFKUaBVN6ANoFkdAjcbARsdkrnV9lChoBmgJaA9DCOS/QBCgPmJAlIaUUpRoFU3oA2gWR0CNyW0cfeUIdX2UKGgGaAloD0MIXI3sSssUQkCUhpRSlGgVS99oFkdAjc2LylN1yXV9lChoBmgJaA9DCEYJ+gs9+FtAlIaUUpRoFU3oA2gWR0CN0h93r2QGdX2UKGgGaAloD0MIMnGrIAYIXECUhpRSlGgVTegDaBZHQI3W+yE+Pil1fZQoaAZoCWgPQwheonpr4F1gQJSGlFKUaBVN6ANoFkdAjdtqNhmXgXV9lChoBmgJaA9DCApoImz4zmBAlIaUUpRoFU3oA2gWR0CN5JyJbdJrdX2UKGgGaAloD0MIVRSvsrbHX0CUhpRSlGgVTegDaBZHQI3nRpg1FYx1fZQoaAZoCWgPQwguBDkoYehiQJSGlFKUaBVN6ANoFkdAjgG6naWX1XV9lChoBmgJaA9DCBjpRe1+5mRAlIaUUpRoFU3oA2gWR0COA0GxD9fkdX2UKGgGaAloD0MIx6F+F7YyNcCUhpRSlGgVS9JoFkdAjgXJEH+qBHV9lChoBmgJaA9DCPyPTIdOjwxAlIaUUpRoFUv3aBZHQI4GCA6Mir11fZQoaAZoCWgPQwibkUHuInhdQJSGlFKUaBVN6ANoFkdAjgZVIiC8OHV9lChoBmgJaA9DCN2271F/XWFAlIaUUpRoFU3oA2gWR0COUqkt29tedX2UKGgGaAloD0MIjdXm/9VXZECUhpRSlGgVTegDaBZHQI5gi6MBIWh1fZQoaAZoCWgPQwg3qP3WzrpkQJSGlFKUaBVN6ANoFkdAjmZ6pHZsbnV9lChoBmgJaA9DCE637BD/qWFAlIaUUpRoFU3oA2gWR0COZ9Kf4AS4dX2UKGgGaAloD0MI46lHGlyVYECUhpRSlGgVTegDaBZHQI5zxPhybQV1fZQoaAZoCWgPQwhDWI0lLLRjQJSGlFKUaBVN6ANoFkdAjnTclPacqnV9lChoBmgJaA9DCCzzVl2HijZAlIaUUpRoFUvoaBZHQI6haMUAT7F1fZQoaAZoCWgPQwihvfp4aF9jQJSGlFKUaBVN6ANoFkdAjqJG7SRbKXV9lChoBmgJaA9DCEeSIFyBOGFAlIaUUpRoFU3oA2gWR0COpddO6/ZedX2UKGgGaAloD0MIWKzhIndyaECUhpRSlGgVTegDaBZHQI6sH1rZamp1fZQoaAZoCWgPQwjUfJV8bBRgQJSGlFKUaBVN6ANoFkdAjq5wazeGf3V9lChoBmgJaA9DCIBFfv2QaGNAlIaUUpRoFU3oA2gWR0COt6pON5t4dX2UKGgGaAloD0MIMJ+sGC6OYkCUhpRSlGgVTegDaBZHQI7A8IcBEKF1fZQoaAZoCWgPQwi4yagyjLdmQJSGlFKUaBVN6ANoFkdAjsV1v2oNu3V9lChoBmgJaA9DCGWryykBjl5AlIaUUpRoFU3oA2gWR0COzJMYdhiLdX2UKGgGaAloD0MIsdtnlRm1YkCUhpRSlGgVTegDaBZHQI7NdOEdvKl1fZQoaAZoCWgPQwjXpNsSufNcQJSGlFKUaBVN6ANoFkdAjs4a9K28ZnV9lChoBmgJaA9DCEc5mE2AVWBAlIaUUpRoFU3oA2gWR0CO0+qbSZ0CdX2UKGgGaAloD0MIWK63zVTPW0CUhpRSlGgVTegDaBZHQI7a9/QSi/R1fZQoaAZoCWgPQwhjQzf7gw5gQJSGlFKUaBVN6ANoFkdAjt3H2RJVbXV9lChoBmgJaA9DCPusMlNafy7AlIaUUpRoFUviaBZHQI7iAeT3Zf51fZQoaAZoCWgPQwi+E7NeDANfQJSGlFKUaBVN6ANoFkdAjuukBCD28XV9lChoBmgJaA9DCHNKQEzCQGBAlIaUUpRoFU3oA2gWR0CO8PVbzK9xdX2UKGgGaAloD0MIL0/nilJAS0CUhpRSlGgVS+hoFkdAjvZQ0O3DvXV9lChoBmgJaA9DCO9TVWggLmJAlIaUUpRoFU3oA2gWR0CO/V22Xsw+dX2UKGgGaAloD0MIKChFK/daYUCUhpRSlGgVTegDaBZHQI8DTadtl7N1fZQoaAZoCWgPQwjww0FClG8hwJSGlFKUaBVNBwFoFkdAjwRZMURFqnV9lChoBmgJaA9DCL75DRMN3VdAlIaUUpRoFU3oA2gWR0CPC4YCQtBfdX2UKGgGaAloD0MIRSkhWFVtV0CUhpRSlGgVTegDaBZHQI8PYUzsQd11fZQoaAZoCWgPQwgJwD+lSiNjQJSGlFKUaBVN6ANoFkdAjxdK77Kq43V9lChoBmgJaA9DCLxZg/fVX2JAlIaUUpRoFU3oA2gWR0CPGhLlmvnsdX2UKGgGaAloD0MI0T3rGi2HKcCUhpRSlGgVS+VoFkdAjy2uZ1FH8XV9lChoBmgJaA9DCHEEqRQ7+GZAlIaUUpRoFU3oA2gWR0CPOR4UN8VpdX2UKGgGaAloD0MI9fOmIpWcYkCUhpRSlGgVTegDaBZHQI86mDHwPRR1fZQoaAZoCWgPQwi4c2GkF0BhQJSGlFKUaBVN6ANoFkdAjzzMcABDHHV9lChoBmgJaA9DCFuwVBfwlGRAlIaUUpRoFU3oA2gWR0CPPQhAWznidX2UKGgGaAloD0MIZOWXwRjeW0CUhpRSlGgV
TegDaBZHQI89V/QSi/R1fZQoaAZoCWgPQwhBf6FHjN4iQJSGlFKUaBVLxWgWR0CPPlKxLTQWdX2UKGgGaAloD0MIQDBHj18fZ0CUhpRSlGgVTegDaBZHQI9ANNet0V91fZQoaAZoCWgPQwjkZyPXTX0xQJSGlFKUaBVL1GgWR0CPjwXb/Ot5dX2UKGgGaAloD0MIo7CLoocEYECUhpRSlGgVTegDaBZHQI+WHBciW3V1fZQoaAZoCWgPQwgiiV5GsVVUQJSGlFKUaBVN6ANoFkdAj5twE6kqMHV9lChoBmgJaA9DCN/cXz1uj2VAlIaUUpRoFU3oA2gWR0CPp1nMdLg5dX2UKGgGaAloD0MInOCbps9eX0CUhpRSlGgVTegDaBZHQI+oR99c8kl1fZQoaAZoCWgPQwglrfiGwmf3v5SGlFKUaBVLs2gWR0CPqXmK64DtdX2UKGgGaAloD0MIcCNli6S9G8CUhpRSlGgVS+NoFkdAj7XemelKsnV9lChoBmgJaA9DCEOOrWcIlydAlIaUUpRoFUvMaBZHQI/GJ4W1twd1fZQoaAZoCWgPQwg7w9SWOh1fQJSGlFKUaBVN6ANoFkdAj84A7YChe3V9lChoBmgJaA9DCACo4sYtn15AlIaUUpRoFU3oA2gWR0CPztBJqZc+dX2UKGgGaAloD0MIdQZGXtblZkCUhpRSlGgVTegDaBZHQI/RldLQHA11fZQoaAZoCWgPQwhmh/iHLUtmQJSGlFKUaBVN6ANoFkdAj9aeg13t8nV9lChoBmgJaA9DCBGsqpffM1xAlIaUUpRoFU3oA2gWR0CP4I7p3X7MdX2UKGgGaAloD0MISvCGNKoxY0CUhpRSlGgVTegDaBZHQI/opdfLLZB1fZQoaAZoCWgPQwg7ONibmLxiQJSGlFKUaBVN6ANoFkdAj/OgtnPE9HV9lChoBmgJaA9DCEOtad7xdmVAlIaUUpRoFU3oA2gWR0CP9Sy57PY4dX2UKGgGaAloD0MIcQFolK6mZUCUhpRSlGgVTegDaBZHQI/8OUSqU/x1fZQoaAZoCWgPQwi214LemyphQJSGlFKUaBVN6ANoFkdAkAHC3PRiPXV9lChoBmgJaA9DCGiVmdL6pWNAlIaUUpRoFU3oA2gWR0CQA0ukk8ifdX2UKGgGaAloD0MIc/G3PcELYUCUhpRSlGgVTegDaBZHQJAFeDVYp2F1fZQoaAZoCWgPQwjxZDcz+nNiQJSGlFKUaBVN6ANoFkdAkApMg2ZRbnV9lChoBmgJaA9DCEN0CBwJ51xAlIaUUpRoFU3oA2gWR0CQDSdO6/ZedX2UKGgGaAloD0MI2q1lMhyIYUCUhpRSlGgVTegDaBZHQJATo+B6KLt1fZQoaAZoCWgPQwiTpkHRPBxjQJSGlFKUaBVN6ANoFkdAkBbPJV81GnV9lChoBmgJaA9DCHnKarqekGJAlIaUUpRoFU3oA2gWR0CQF2gnc+JQdX2UKGgGaAloD0MIur2kMVqKWkCUhpRSlGgVTegDaBZHQJAhylhw2l51fZQoaAZoCWgPQwihR4ye25JmQJSGlFKUaBVN6ANoFkdAkCMSK3uuzXV9lChoBmgJaA9DCL68APtoS2ZAlIaUUpRoFU3oA2gWR0CQKnE/SpirdX2UKGgGaAloD0MI1/hM9s+vM0CUhpRSlGgVS9poFkdAkC01IiC8OHV9lChoBmgJaA9DCE30+SgjS19AlIaUUpRoFU3oA2gWR0CQMBkRSP2gdX2UKGgGaAloD0MIud42U6FKZkCUhpRSlGgVTegDaBZHQJAyDZrYXft1fZQoaAZoCWgPQwiKkLqd/epkQJSGlFKUaBVN6ANoFkdAkDIv7aZhKHV9lChoBmgJaA9DCBwkRPkCWWFAlIaUUpRoFU3oA2gWR0CQMuS39aUzdX2UKGgGaAloD0MIj95wH7m4XkCUhpRSlGgVTegDaBZHQJAz6wcHWz51fZQoaAZoCWgPQwgeGED40JZoQJSGlFKUaBVNeQFoFkdAkDX7/bTMJXV9lChoBmgJaA9DCAmH3uLhBFlAlIaUUpRoFU3oA2gWR0CQNqOwgTysdWUu"
73
  },
74
  "ep_success_buffer": {
75
  ":type:": "<class 'collections.deque'>",
76
  ":serialized:": "gASVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
77
  },
78
+ "_n_updates": 124,
79
  "n_steps": 1024,
80
  "gamma": 0.999,
81
  "gae_lambda": 0.98,
ppo-LunarLander-v2/policy.optimizer.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:2bae89841d935c646edbc89c3edd2f791154cec0e99155ef470a93e12814aa13
+ oid sha256:ca72c0667bf09880066698a0ac9e69506a7d65b645de9cb531ba602825b899d9
  size 84829
ppo-LunarLander-v2/policy.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:8c32feb652c5d3479bb5a695819d9f808ac23158b01f25f501d2df4503ae8cdf
+ oid sha256:60b6b43ed521c8f95e77712d7b78dd1f724048158b98bc263738a807ec43387f
  size 43201
replay.mp4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:01d7f56cc8ced05307cab5c4785da890024787a85e5e4437e72bed8742899210
+ size 250971
results.json CHANGED
@@ -1 +1 @@
- {"mean_reward": 230.45827497478308, "std_reward": 23.941731861767668, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2022-06-04T18:44:47.874161"}
+ {"mean_reward": 252.71244280242945, "std_reward": 23.211099359751497, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2022-06-04T19:06:38.411216"}
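
The new results.json reports mean_reward 252.71 +/- 23.21 over 10 deterministic episodes (up from roughly 230.46 before). An evaluation along the following lines produces these fields, assuming the committed checkpoint file and stable-baselines3's `evaluate_policy` helper; the actual evaluation script is not part of this commit:

```python
import gym
from stable_baselines3 import PPO
from stable_baselines3.common.evaluation import evaluate_policy

# Load the committed checkpoint and evaluate it as results.json records:
# 10 episodes with deterministic actions.
model = PPO.load("ppo-LunarLander-v2.zip")
eval_env = gym.make("LunarLander-v2")
mean_reward, std_reward = evaluate_policy(
    model, eval_env, n_eval_episodes=10, deterministic=True
)
print(f"mean_reward={mean_reward:.2f} +/- {std_reward:.2f}")
```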