ernestum committed

Commit baad4bf
1 Parent(s): 6a938ff

Initial commit
.gitattributes CHANGED
@@ -25,3 +25,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zstandard filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ *.mp4 filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,65 @@
+ ---
+ library_name: stable-baselines3
+ tags:
+ - seals/Hopper-v0
+ - deep-reinforcement-learning
+ - reinforcement-learning
+ - stable-baselines3
+ model-index:
+ - name: SAC
+   results:
+   - metrics:
+     - type: mean_reward
+       value: 2330.52 +/- 138.95
+       name: mean_reward
+     task:
+       type: reinforcement-learning
+       name: reinforcement-learning
+     dataset:
+       name: seals/Hopper-v0
+       type: seals/Hopper-v0
+ ---
+
+ # **SAC** Agent playing **seals/Hopper-v0**
+ This is a trained model of a **SAC** agent playing **seals/Hopper-v0**
+ using the [stable-baselines3 library](https://github.com/DLR-RM/stable-baselines3)
+ and the [RL Zoo](https://github.com/DLR-RM/rl-baselines3-zoo).
+
+ The RL Zoo is a training framework for Stable Baselines3
+ reinforcement learning agents,
+ with hyperparameter optimization and pre-trained agents included.
+
+ ## Usage (with SB3 RL Zoo)
+
+ RL Zoo: https://github.com/DLR-RM/rl-baselines3-zoo<br/>
+ SB3: https://github.com/DLR-RM/stable-baselines3<br/>
+ SB3 Contrib: https://github.com/Stable-Baselines-Team/stable-baselines3-contrib
+
+ ```
+ # Download model and save it into the logs/ folder
+ python -m utils.load_from_hub --algo sac --env seals/Hopper-v0 -orga ernestumorga -f logs/
+ python enjoy.py --algo sac --env seals/Hopper-v0 -f logs/
+ ```
+
+ ## Training (with the RL Zoo)
+ ```
+ python train.py --algo sac --env seals/Hopper-v0 -f logs/
+ # Upload the model and generate video (when possible)
+ python -m utils.push_to_hub --algo sac --env seals/Hopper-v0 -f logs/ -orga ernestumorga
+ ```
+
+ ## Hyperparameters
+ ```python
+ OrderedDict([('batch_size', 128),
+              ('buffer_size', 100000),
+              ('gamma', 0.98),
+              ('learning_rate', 0.001709807687567946),
+              ('learning_starts', 1000),
+              ('n_timesteps', 1000000.0),
+              ('policy', 'MlpPolicy'),
+              ('policy_kwargs',
+               'dict(net_arch=[256, 256], log_std_init=-1.6829391077276037)'),
+              ('tau', 0.08),
+              ('train_freq', 32),
+              ('normalize', False)])
+ ```
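
As an alternative to the RL Zoo scripts in the README, the checkpoint can be loaded directly with stable-baselines3. A minimal sketch, assuming `sac-seals-Hopper-v0.zip` from this repository has been downloaded locally and that the `seals` package (which registers the environment) is installed:

```python
import gym
import seals  # noqa: F401  -- assumed installed; importing it registers the seals/* environments
from stable_baselines3 import SAC
from stable_baselines3.common.evaluation import evaluate_policy

# Load the checkpoint shipped in this repository (local path is an assumption).
model = SAC.load("sac-seals-Hopper-v0.zip")

env = gym.make("seals/Hopper-v0")
mean_reward, std_reward = evaluate_policy(model, env, n_eval_episodes=10, deterministic=True)
print(f"mean_reward={mean_reward:.2f} +/- {std_reward:.2f}")
```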
args.yml ADDED
@@ -0,0 +1,75 @@
+ !!python/object/apply:collections.OrderedDict
+ - - - algo
+     - sac
+   - - device
+     - cpu
+   - - env
+     - seals/Hopper-v0
+   - - env_kwargs
+     - null
+   - - eval_episodes
+     - 5
+   - - eval_freq
+     - 25000
+   - - gym_packages
+     - []
+   - - hyperparams
+     - null
+   - - log_folder
+     - seals_experts
+   - - log_interval
+     - -1
+   - - n_eval_envs
+     - 1
+   - - n_evaluations
+     - null
+   - - n_jobs
+     - 1
+   - - n_startup_trials
+     - 10
+   - - n_timesteps
+     - -1
+   - - n_trials
+     - 500
+   - - no_optim_plots
+     - false
+   - - num_threads
+     - 4
+   - - optimization_log_path
+     - null
+   - - optimize_hyperparameters
+     - false
+   - - pruner
+     - median
+   - - sampler
+     - tpe
+   - - save_freq
+     - -1
+   - - save_replay_buffer
+     - false
+   - - seed
+     - 1850693542
+   - - storage
+     - null
+   - - study_name
+     - null
+   - - tensorboard_log
+     - ''
+   - - total_n_trials
+     - null
+   - - track
+     - false
+   - - trained_agent
+     - ''
+   - - truncate_last_trajectory
+     - true
+   - - uuid
+     - false
+   - - vec_env
+     - dummy
+   - - verbose
+     - 1
+   - - wandb_entity
+     - null
+   - - wandb_project_name
+     - sb3
config.yml ADDED
@@ -0,0 +1,21 @@
+ !!python/object/apply:collections.OrderedDict
+ - - - batch_size
+     - 128
+   - - buffer_size
+     - 100000
+   - - gamma
+     - 0.98
+   - - learning_rate
+     - 0.001709807687567946
+   - - learning_starts
+     - 1000
+   - - n_timesteps
+     - 1000000.0
+   - - policy
+     - MlpPolicy
+   - - policy_kwargs
+     - dict(net_arch=[256, 256], log_std_init=-1.6829391077276037)
+   - - tau
+     - 0.08
+   - - train_freq
+     - 32
env_kwargs.yml ADDED
@@ -0,0 +1 @@
+ {}
replay.mp4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:34bb534aed94f18c3da7014a4341b56c75d29fb85e20a93bf8c95dd3d21cd81b
+ size 1596963
results.json ADDED
@@ -0,0 +1 @@
+ {"mean_reward": 2330.5196060000003, "std_reward": 138.9460415926395, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2022-07-11T14:37:11.855711"}
sac-seals-Hopper-v0.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f5d709ab00b3c67fe3e116e834a359b4d8749099a1770a21fdc574eeb5803643
+ size 3136607
sac-seals-Hopper-v0/_stable_baselines3_version ADDED
@@ -0,0 +1 @@
+ 1.5.1a8
sac-seals-Hopper-v0/actor.optimizer.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1bdbb34ddda96c269af924c94a065bb34dd276f55d9d3b87316e2e8eb12c81f1
+ size 569461
sac-seals-Hopper-v0/critic.optimizer.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:937ea0a7f4cde9cac7b0160586299c37b361c0094b7e1c7b9461b4221811e48a
+ size 1128541
sac-seals-Hopper-v0/data ADDED
@@ -0,0 +1,120 @@
+ {
+ "policy_class": {
+ ":type:": "<class 'abc.ABCMeta'>",
+ ":serialized:": "gAWVMAAAAAAAAACMHnN0YWJsZV9iYXNlbGluZXMzLnNhYy5wb2xpY2llc5SMCVNBQ1BvbGljeZSTlC4=",
+ "__module__": "stable_baselines3.sac.policies",
+ "__doc__": "\n Policy class (with both actor and critic) for SAC.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param sde_net_arch: Network architecture for extracting features\n when using gSDE. If None, the latent features from the policy will be used.\n Pass an empty list to use the states as features.\n :param use_expln: Use ``expln()`` function instead of ``exp()`` when using gSDE to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param clip_mean: Clip the mean output when using gSDE to avoid numerical instability.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n :param n_critics: Number of critic networks to create.\n :param share_features_extractor: Whether to share or not the features extractor\n between the actor and the critic (this saves computation time)\n ",
+ "__init__": "<function SACPolicy.__init__ at 0x7f7d92860e50>",
+ "_build": "<function SACPolicy._build at 0x7f7d92860ee0>",
+ "_get_constructor_parameters": "<function SACPolicy._get_constructor_parameters at 0x7f7d92860f70>",
+ "reset_noise": "<function SACPolicy.reset_noise at 0x7f7d9286b040>",
+ "make_actor": "<function SACPolicy.make_actor at 0x7f7d9286b0d0>",
+ "make_critic": "<function SACPolicy.make_critic at 0x7f7d9286b160>",
+ "forward": "<function SACPolicy.forward at 0x7f7d9286b1f0>",
+ "_predict": "<function SACPolicy._predict at 0x7f7d9286b280>",
+ "set_training_mode": "<function SACPolicy.set_training_mode at 0x7f7d9286b310>",
+ "__abstractmethods__": "frozenset()",
+ "_abc_impl": "<_abc_data object at 0x7f7d9285fc90>"
+ },
+ "verbose": 1,
+ "policy_kwargs": {
+ "net_arch": [
+ 256,
+ 256
+ ],
+ "log_std_init": -1.6829391077276037,
+ "use_sde": false
+ },
+ "observation_space": {
+ ":type:": "<class 'gym.spaces.box.Box'>",
+ ":serialized:": "gAWVJwIAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY4lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLDIWUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWYAAAAAAAAAAAAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P+UaApLDIWUjAFDlHSUUpSMBGhpZ2iUaBIolmAAAAAAAAAAAAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/lGgKSwyFlGgVdJRSlIwNYm91bmRlZF9iZWxvd5RoEiiWDAAAAAAAAAAAAAAAAAAAAAAAAACUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLDIWUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJYMAAAAAAAAAAAAAAAAAAAAAAAAAJRoIUsMhZRoFXSUUpSMCl9ucF9yYW5kb22UTnViLg==",
+ "dtype": "float64",
+ "_shape": [
+ 12
+ ],
+ "low": "[-inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf]",
+ "high": "[inf inf inf inf inf inf inf inf inf inf inf inf]",
+ "bounded_below": "[False False False False False False False False False False False False]",
+ "bounded_above": "[False False False False False False False False False False False False]",
+ "_np_random": null
+ },
+ "action_space": {
+ ":type:": "<class 'gym.spaces.box.Box'>",
+ ":serialized:": "gAWV9QsAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLA4WUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWDAAAAAAAAAAAAIC/AACAvwAAgL+UaApLA4WUjAFDlHSUUpSMBGhpZ2iUaBIolgwAAAAAAAAAAACAPwAAgD8AAIA/lGgKSwOFlGgVdJRSlIwNYm91bmRlZF9iZWxvd5RoEiiWAwAAAAAAAAABAQGUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLA4WUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJYDAAAAAAAAAAEBAZRoIUsDhZRoFXSUUpSMCl9ucF9yYW5kb22UjBRudW1weS5yYW5kb20uX3BpY2tsZZSMEl9fcmFuZG9tc3RhdGVfY3RvcpSTlIwHTVQxOTkzN5SFlFKUfZQojA1iaXRfZ2VuZXJhdG9ylGgwjAVzdGF0ZZR9lCiMA2tleZRoEiiWwAkAAAAAAAAAAACAU8KznIcDtZNy7Ktb6Oay8s+2gdrVBu9hoTFNoGu1zNkT5hifdJx5L8ilG4DEeQFJng9D5F3gGJOSE1XM1EopZNIIlb400J5EcnoD8K2/CnObez7pYLEG2nUDRQtufdYWausENGaDt/P1pS9p70JjQ7Vc98J3UsxGRDctCIlu0I6ud/sYtoBPe575TzLsEti5jl6FqRnKrj12LWcrQoCexe7HH/UiAV1LzyQPzBlSZERXmHCdCvUSF7XpWt47xP9BzzqxX7aH3TPYWImqos1/ez/JlLdsD0MfMZl9G2CQq7cHHRlM3sj7jroA9c+pGt4l/iAGpRb80HbjwU71ykPTAVp531BXrc2qmIU6z9Fh4TAPx7fZ1kVF+L1Irlou+4Ckky7Ys59nB7KkciTI+N5jlb62ybZt0+ZWgIA6LKLvdx/mTQtB4k1aplT/C7L9/ybKCFn2quN/7YlIkxoH1U0xdabG6rgOrR+SHMmvUwvtKB+19Ibb07mSgVQyjNAvnyADPJf3pkxylZtn7f/OVpWEaWfl6BcLwy0grrEgUK+H+8P8XWMuBginXgwzn3sy4+ZOlr45op6TtuqX0Knz/SySGDlBIK8JqKObzB6fGt+ovJHEM8KlL4veKwkLkuuMWBaex3FBdWskry5qhslxMgnk2thh8DaXmAfbuI8j0SqHMW1kleITi9ekfXx/eSi5hX1GjA/M62Zixuay1H8zH9VjsTRcGacyJ0vh1hNReDFoNsXFbLfLqaIvbLDQjY7T289ZXsupvAxu2GVTbqWst+ckPPzwH7vLikULC+weAKwxarqm+ugAXgyz774meHOsvQYuu18nvrrunjZWDvwaKuYohEwUfSnpotE9XhX99yUTc8sGPQidTfXkzm/t8MWP8it4l4VSEgDLn8GW8t2DAh8EwFa/KOGoZEGjYqZ2IMA70E+F2LqgaZlQLFMONTIx3yuN5F2e1MT4v2wdBRK9R+lGMpxIiNldyOwwxLDBTRDMhd7APidmDwQBnvaIecKFa95btwHkRBEUT5g++/I0DDg685EX4OMO2YtTPqM3PQluS4puEhAQRVukNGSh4gYDgcBPKZl4ThNf+G+E7El9fmWJcP39Sifw6Mn+GEisM1RhHY05XZHUv5W4r8kD2jSLMY+IIL2+LtQrW7it7y28+sEicLoEfYOky9ZJF6l0fR+sXEawf+REH9LvtRJ4yzfxr7KisNpr1axv1ae5CDXS+XTzuOG/BJnHvt8arnY1XWH9SdkCOeok6MI8GBCtjTCxJ5JbpI5J0i0A66mJaRW9LMfP6Cil3/cVRQ9uN2KTtV3o7rJwY4XCnj7DJmqrUwofDDl7Ek0PoN7w0Hh8YHOy8qhPw7V8ALdjZn7eYtjCQIldQvHbM1I73RtCLQvQGFMXUCJ022pGRqTvZX5XWSizqbgX6TJmI6LDF9wcpYealB7cDwelfqdpzHRmyjRbIX9b+w4uj//aDRgP2SgiOAq/D/9/0SbgK/E0FQyclhNVAkbKwXhAxKGczpvJow0mFFUAt/5fT5KAsmQTAt8p0FsrGMDTfk4RzZgqZSm+ihVRS371Tx3twpGA1goo/AIfJh8slJC3hkR1OGCN7LAPGCwbM9rHlKSU4uuhJiff196h9q1kPMld6989MfKLVkvCl7ofCRurPUW46ceJKE951sQD1v8cK0HK1JmuBTCXAelCUCIFNLGk3tMXNVmuuFF3o3xb4V4T1IAYIfBdyEVHhIIZOE/JEY79daQw8njYEtQ6YwZ6kNCBYfrjq2OglITcRdwDmINL42ro6HnbWgLZQ8Ce/EiPVBtWHwhvGUHK1FNONzRzXgT1zKEg+WAigeuK4QVIxdITM4YvUyYvpQJuJd+xGD1no7BYIKXdV4aDlsRnWSMmS+zTyTvC0+TgBMCNpMvdChjaB/XTrMVsm0vgPmCYswn067MTYWfm5oCqqmNciqoRfFL2O2mxFT1VMcKDrxHBdBUhSG5UmAerx86KAEytbsCbn6OOj8Y02VwVynzXd0WJfLioeGMZISM1eneWfTc1mQ6CpdDxJqUmU86/KsBL3Bb0S2NAqFysFJZKxDwLej8xz+xH8IxEHzlkiiNH+2IIq0663FAwi6wg6dgcryDqQ+lNDwn898nylrcYShigDrtrFBNezKx3ZjpkPCnPUeQB4hJUrYCUJy5CyytC/x1UsByKez/aSNEWnlWnzYdJf2PoKL0YfmaR3KpXzi9ax3BHPgk1cdmgdVkqevFJ0DUdTBFQj/mhaKqcaT0rKJLgy/11AhWW4nX7+kAdgR0b1iAseI0TbMDtohBuqqUZfqMfUKsdI8v2aeUd0+IqOjPBFe7TZRC7OUYmf789SRTpw9gst4tzx7tLap8JnFt2keKhqd3vBgqpvlsxvx0DcPC+bo/qIldKiAn5D7TPjeWLzJ1gmpk1mVKOyWOv/ZzlRTfe8yEsMsRcgdPxbOuxLjlOwo1uFh9NjHoOz/xbnI62I49ZzT59GUCNtAL74UqjlRoyXZ5ELEjhTn+F5fYfEkY2TnSsgKO4Wwb/xD41S4mBL7LcUyF76ybV7Yx0L6V2QGoSfyhHFqMQJs/haLPPW18mWJb/UDl90ZN9TEzcdXvZsmCeqzCagC6YDHp3fop+5nAQSnT/Byt2j7z+6cnl/aZh6oKs5xrEMmuzpLFbXNVof9hNmX5E0DQ2M8uBqqeW95p6z8ySnOxURAO28oYWsbVyeYaNlWLZrOtIMZDRjjbecSSwMLlrBhw4mZVht4DgOQxI1+P7sPHZLMf89U+5ctf1rD0r1AXgyXjzOxKvCxWMhrz6Ah19+zal/bAIpw+0V7Pq85PRQO4UeScmMwODR8jcOfILuMmo7xXhemY/JqtOncklEaGapMeGlkie
fvQkx9L5EWvLn6stI4zRP4pZXx9iOz17IKJmKOVHgCIAOiheb0bwkjNkItlfYO3LzeLLPuBDNLFg7tQu5NPWy28a4nBsE/gsyEteRvF2ECYFIOJg06dzc77IWw7o+z1Q5APxLg9uvyFniYWNuJyk7rflLCmYcg1gN657CWff8YfPr0ukKOamco94X1nFdyroxHiQlRXaP91DOqMueI1pCasyRQt0jtbWwxdEVyzP3GzUZXBWqa0xXCzwe29cxg2aiwKuuAAVfaCE/Pt1cJXq8wvliF81sMDPMbowd9+uyWuExq/e+2W3wWeV3hVofoiEySjBrJPWVJW9++UocJbC0ppNw5mtHktkZqUk6kVtUgVQ4Cj4udj/bluZzcqWjIvOCJO52M+xcQY808Ei8T/lwwS9TguuzQ3e0KR7hptgNcX1/XhCvAuUaAeMAnU0lImIh5RSlChLA2gLTk5OSv////9K/////0sAdJRiTXAChZRoFXSUUpSMA3Bvc5RNcAJ1jAloYXNfZ2F1c3OUSwCMBWdhdXNzlEcAAAAAAAAAAHVidWIu",
+ "dtype": "float32",
+ "_shape": [
+ 3
+ ],
+ "low": "[-1. -1. -1.]",
+ "high": "[1. 1. 1.]",
+ "bounded_below": "[ True True True]",
+ "bounded_above": "[ True True True]",
+ "_np_random": "RandomState(MT19937)"
+ },
+ "n_envs": 1,
+ "num_timesteps": 1000000,
+ "_total_timesteps": 1000000,
+ "_num_timesteps_at_start": 0,
+ "seed": 0,
+ "action_noise": null,
+ "start_time": 1651241344.0300884,
+ "learning_rate": {
+ ":type:": "<class 'function'>",
+ ":serialized:": "gAWVmAIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwNX2J1aWx0aW5fdHlwZZSTlIwKTGFtYmRhVHlwZZSFlFKUKGgCjAhDb2RlVHlwZZSFlFKUKEsBSwBLAEsBSwFLE0MEiABTAJROhZQpjAFflIWUjGgvbmFzL3VjYi9tYXhpbWlsaWFuL3JsLWJhc2VsaW5lczMtem9vL3ZlbnYvbGliL3B5dGhvbjMuOC9zaXRlLXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4BDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5RoDXVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgXaA6MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgYjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz9cA3QGb3jYhZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"
+ },
+ "tensorboard_log": null,
+ "lr_schedule": {
+ ":type:": "<class 'function'>",
+ ":serialized:": "gAWVmAIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwNX2J1aWx0aW5fdHlwZZSTlIwKTGFtYmRhVHlwZZSFlFKUKGgCjAhDb2RlVHlwZZSFlFKUKEsBSwBLAEsBSwFLE0MEiABTAJROhZQpjAFflIWUjGgvbmFzL3VjYi9tYXhpbWlsaWFuL3JsLWJhc2VsaW5lczMtem9vL3ZlbnYvbGliL3B5dGhvbjMuOC9zaXRlLXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4BDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5RoDXVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgXaA6MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgYjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz9cA3QGb3jYhZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"
+ },
+ "_last_obs": null,
+ "_last_episode_starts": {
+ ":type:": "<class 'numpy.ndarray'>",
+ ":serialized:": "gAWVdAAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYBAAAAAAAAAAGUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwGFlIwBQ5R0lFKULg=="
+ },
+ "_last_original_obs": {
+ ":type:": "<class 'numpy.ndarray'>",
+ ":serialized:": "gAWV1QAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJZgAAAAAAAAAAtomMjuwzFAfSCTXi3j0j8nz9U1GGT0PyGtPE15MNe/huB2paBDzL+NySmDTufTP7ItXJq/GARAvTUDIhPK67/3tokmwui3Pyjyie5HigHAI+29kSUHEEAM22KxCVgfQJSMBW51bXB5lIwFZHR5cGWUk5SMAmY4lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGJLAUsMhpSMAUOUdJRSlC4="
+ },
+ "_episode_num": 1000,
+ "use_sde": false,
+ "sde_sample_freq": -1,
+ "_current_progress_remaining": 0.0,
+ "ep_info_buffer": {
+ ":type:": "<class 'collections.deque'>",
+ ":serialized:": "gAWVgRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIwCZr1FNnnECUhpRSlIwBbJRN6AOMAXSUR0CMVxKTSsr/dX2UKGgGaAloD0MIVKcDWZ+xmECUhpRSlGgVTegDaBZHQIxf+YSg5BF1fZQoaAZoCWgPQwiWCFT/QFibQJSGlFKUaBVN6ANoFkdAjGdYe1a4c3V9lChoBmgJaA9DCIBmEB9Yg5tAlIaUUpRoFU3oA2gWR0CMbjcgQpWndX2UKGgGaAloD0MI3gAz38EymkCUhpRSlGgVTegDaBZHQIx1PigkC3h1fZQoaAZoCWgPQwjuQJ3yaP6YQJSGlFKUaBVN6ANoFkdAjHw/NZ/0/XV9lChoBmgJaA9DCH2zzY1JSptAlIaUUpRoFU3oA2gWR0CMg3Mvh60IdX2UKGgGaAloD0MIjGSPUEOmnkCUhpRSlGgVTegDaBZHQIyKTqjafz11fZQoaAZoCWgPQwhTWRR2seSbQJSGlFKUaBVN6ANoFkdAjJFVmapgkXV9lChoBmgJaA9DCAMHtHSV1KBAlIaUUpRoFU3oA2gWR0CMmDVbzK9xdX2UKGgGaAloD0MIFTqvscs/nkCUhpRSlGgVTegDaBZHQIyfZLytmth1fZQoaAZoCWgPQwhVF/AyM9yXQJSGlFKUaBVN6ANoFkdAjKZbI91U2nV9lChoBmgJaA9DCEiLM4ZJcplAlIaUUpRoFU3oA2gWR0CMrXZRsMy8dX2UKGgGaAloD0MItykeF1VXjECUhpRSlGgVTegDaBZHQIy0WtfXwsp1fZQoaAZoCWgPQwgpB7MJEK6dQJSGlFKUaBVN6ANoFkdAjLtk43m3fHV9lChoBmgJaA9DCEM50a7itpxAlIaUUpRoFU3oA2gWR0CMw20OVgQZdX2UKGgGaAloD0MIKJ6zBRSQmkCUhpRSlGgVTegDaBZHQIzMTFl05lx1fZQoaAZoCWgPQwhwJxHhjw6dQJSGlFKUaBVN6ANoFkdAjNTz4k/r0XV9lChoBmgJaA9DCGo0uRhzc6BAlIaUUpRoFU3oA2gWR0CM3HIYFaB7dX2UKGgGaAloD0MIglMfSD5umkCUhpRSlGgVTegDaBZHQIzjinR9gF51fZQoaAZoCWgPQwg4oKUr2MCfQJSGlFKUaBVN6ANoFkdAjOsw2l2vCHV9lChoBmgJaA9DCExsPq7NDJxAlIaUUpRoFU3oA2gWR0CM9KQLeANHdX2UKGgGaAloD0MIFvvL7vEooECUhpRSlGgVTegDaBZHQIz8sFlkH2R1fZQoaAZoCWgPQwhYOh+eRQKdQJSGlFKUaBVN6ANoFkdAjQU4Lb5/LHV9lChoBmgJaA9DCNdP/1kTopdAlIaUUpRoFU3oA2gWR0CNDlNet0V8dX2UKGgGaAloD0MIRnnm5XDFnECUhpRSlGgVTegDaBZHQI0pQMH8jzJ1fZQoaAZoCWgPQwhYcaq1sAWbQJSGlFKUaBVN6ANoFkdAjTGptSAH3XV9lChoBmgJaA9DCPS/XItmcp9AlIaUUpRoFU3oA2gWR0CNOXZkCmuUdX2UKGgGaAloD0MI2zLgLKV0nECUhpRSlGgVTegDaBZHQI1AyP+4smR1fZQoaAZoCWgPQwgkgJvFy6KcQJSGlFKUaBVN6ANoFkdAjUe3QUpNK3V9lChoBmgJaA9DCAPPvYf7D59AlIaUUpRoFU3oA2gWR0CNTqp/gBLgdX2UKGgGaAloD0MIqvHSTZJdm0CUhpRSlGgVTegDaBZHQI1Vr3AVO9F1fZQoaAZoCWgPQwiZg6Cjde2fQJSGlFKUaBVN6ANoFkdAjV0O0CzTnnV9lChoBmgJaA9DCIFbd/NUfnlAlIaUUpRoFU3oA2gWR0CNZHgJkXk6dX2UKGgGaAloD0MI6kDWU9ttnUCUhpRSlGgVTegDaBZHQI1rcQAdXDF1fZQoaAZoCWgPQwie7dEbPlaZQJSGlFKUaBVN6ANoFkdAjXL1n27FsHV9lChoBmgJaA9DCDV+4ZUUJZdAlIaUUpRoFU3oA2gWR0CNemfOD8LsdX2UKGgGaAloD0MIOBWpMDaroUCUhpRSlGgVTegDaBZHQI2Bu98JD3N1fZQoaAZoCWgPQwg1QdR9QE+dQJSGlFKUaBVN6ANoFkdAjYjvY4ACGXV9lChoBmgJaA9DCLly9s4IdJ1AlIaUUpRoFU3oA2gWR0CNj+lyBCladX2UKGgGaAloD0MIu5hmusdOn0CUhpRSlGgVTegDaBZHQI2XMygwoLJ1fZQoaAZoCWgPQwjKb9HJguCcQJSGlFKUaBVN6ANoFkdAjZ4RYq5LAnV9lChoBmgJaA9DCGNH41AvLp9AlIaUUpRoFU3oA2gWR0CNpWTlDF6zdX2UKGgGaAloD0MIhbNby7RKl0CUhpRSlGgVTegDaBZHQI2skl7dBSl1fZQoaAZoCWgPQwholgSomV2VQJSGlFKUaBVN6ANoFkdAjbOErXlKb3V9lChoBmgJaA9DCKt7ZHNV6ZpAlIaUUpRoFU3oA2gWR0CNuj7w8W9EdX2UKGgGaAloD0MI/I9Mh06foECUhpRSlGgVTegDaBZHQI3BCdH2AXl1fZQoaAZoCWgPQwjH1ciu1LWTQJSGlFKUaBVN6ANoFkdAjcfsNlRP43V9lChoBmgJaA9DCNpzmZocxaJAlIaUUpRoFU3oA2gWR0CNzw3QUpNLdX2UKGgGaAloD0MIOUcdHYcFl0CUhpRSlGgVTegDaBZHQI3V5P69CeF1fZQoaAZoCWgPQwhBDkqYISOgQJSGlFKUaBVN6ANoFkdAjfBA0Kqn33V9lChoBmgJaA9DCKGi6lcKB51AlIaUUpRoFU3oA2gWR0CN94n1FpfydX2UKGgGaAloD0MI2ZjXEVeloECUhpRSlGgVTegDaBZHQI3+9UCJXQt1fZQoaAZoCWgPQwg4aRoU7fWeQJSGlFKUaBVN6ANoFkdAjgYd2xIJ7nV9lChoBmgJaA9DCNqpudxYVKBAlIaUUpRoFU3oA2gWR0CODU1k1/DtdX2UKGgGaAloD0MIYTQr2/fMmECUhpRSlGgVTegDaBZHQI4UrTtsvZh1fZQoaAZoCWgPQwi4yhMIMwahQJSGlFKUaBVN6ANoFkdAjhwRLK3d9HV9lChoBmgJaA9DCJRqn45H9ptAlIaUUpRoFU3oA2gWR0COIvaoMrmRdX2UKGgGaAloD0MIyhe0kCBolkCUhpRSlGgVTegDaBZHQI4qSYmb9ZR1fZQoaAZoCWgPQwhFEVK320ehQJSGlFKUaBVN6ANoFkdAjjGIGIKtxXV9lChoBmgJaA9DCJmdRe+kgqBAlIaUUpRoFU3oA2gWR0COOOVZ9uxbdX2UKGgGaAloD0MIgnFw6Uig
oUCUhpRSlGgVTegDaBZHQI5AiIznA7B1fZQoaAZoCWgPQwg89N2tHEOdQJSGlFKUaBVN6ANoFkdAjkeud5IH1XV9lChoBmgJaA9DCI49ey5LkaFAlIaUUpRoFU3oA2gWR0COTycvugHvdX2UKGgGaAloD0MIgbT/AYbmm0CUhpRSlGgVTegDaBZHQI5W39FWn0l1fZQoaAZoCWgPQwj3Ax4YIM+dQJSGlFKUaBVN6ANoFkdAjl44tHxz73V9lChoBmgJaA9DCM2SADV1h5lAlIaUUpRoFU3oA2gWR0COZTtu1ndwdX2UKGgGaAloD0MIvFruzNTym0CUhpRSlGgVTegDaBZHQI5seKbayrx1fZQoaAZoCWgPQwiFJLN6F4ieQJSGlFKUaBVN6ANoFkdAjnPH7pFCs3V9lChoBmgJaA9DCKzHfavthKBAlIaUUpRoFU3oA2gWR0COetRSgoPTdX2UKGgGaAloD0MIrmTHRhAvn0CUhpRSlGgVTegDaBZHQI6B06gdwNt1fZQoaAZoCWgPQwh72XbaEmyhQJSGlFKUaBVN6ANoFkdAjojWoNutOnV9lChoBmgJaA9DCCzWcJETraBAlIaUUpRoFU3oA2gWR0COj7TtLL6ldX2UKGgGaAloD0MIhe6SONMJoUCUhpRSlGgVTegDaBZHQI6WfctXgcd1fZQoaAZoCWgPQwh0RSkhqPmgQJSGlFKUaBVN6ANoFkdAjp1OxbB42XV9lChoBmgJaA9DCC5zuize76BAlIaUUpRoFU3oA2gWR0COtpmwJPZadX2UKGgGaAloD0MINlt5yUc1oUCUhpRSlGgVTegDaBZHQI69vZqVQhx1fZQoaAZoCWgPQwhsXWqEbgGiQJSGlFKUaBVN6ANoFkdAjsTukk8ifXV9lChoBmgJaA9DCK8FvTcWyKBAlIaUUpRoFU3oA2gWR0COy7B1s+FDdX2UKGgGaAloD0MII2k3+qivnECUhpRSlGgVTegDaBZHQI7SftKIznB1fZQoaAZoCWgPQwgGu2HbotChQJSGlFKUaBVN6ANoFkdAjtlmm1pj+nV9lChoBmgJaA9DCPT+P04IiJ9AlIaUUpRoFU3oA2gWR0CO4BBl+VkddX2UKGgGaAloD0MITDeJQeDwn0CUhpRSlGgVTegDaBZHQI7nCJIlMRJ1fZQoaAZoCWgPQwh/FkuRzNSZQJSGlFKUaBVN6ANoFkdAju4/4ZdfLXV9lChoBmgJaA9DCNLD0OokMqFAlIaUUpRoFU3oA2gWR0CO9UPUaybAdX2UKGgGaAloD0MIdjOjH7Wvo0CUhpRSlGgVTegDaBZHQI78MMAmzB11fZQoaAZoCWgPQwjiPQeW2yqhQJSGlFKUaBVN6ANoFkdAjwLdfTkQw3V9lChoBmgJaA9DCMxjzchwBJ9AlIaUUpRoFU3oA2gWR0CPCbfCQ9zPdX2UKGgGaAloD0MIHvzEAQRzokCUhpRSlGgVTegDaBZHQI8QmilBQep1fZQoaAZoCWgPQwjLgR5q2zSiQJSGlFKUaBVN6ANoFkdAjxeREWqLj3V9lChoBmgJaA9DCGgJMgKyBKNAlIaUUpRoFU3oA2gWR0CPHqzkZJkHdX2UKGgGaAloD0MIfTz03b04oECUhpRSlGgVTegDaBZHQI8lmNR3u/l1fZQoaAZoCWgPQwiQ9dTqy16dQJSGlFKUaBVN6ANoFkdAjyxfE4vN/3V9lChoBmgJaA9DCDzaOGIN3qBAlIaUUpRoFU3oA2gWR0CPM20rK/21dX2UKGgGaAloD0MInl4py5ByoECUhpRSlGgVTegDaBZHQI86WiUPhAJ1fZQoaAZoCWgPQwhOQ1ThL6ehQJSGlFKUaBVN6ANoFkdAj0GKw6hg3XV9lChoBmgJaA9DCJKVXwYThp5AlIaUUpRoFU3oA2gWR0CPSLrmhdt3dX2UKGgGaAloD0MIRkPGozxno0CUhpRSlGgVTegDaBZHQI9PzIvJzT51fZQoaAZoCWgPQwhJEK6ActKeQJSGlFKUaBVN6ANoFkdAj1b8jiXIEXV9lChoBmgJaA9DCJsb0xNu86FAlIaUUpRoFU3oA2gWR0CPXhL26ClKdWUu"
+ },
+ "ep_success_buffer": {
+ ":type:": "<class 'collections.deque'>",
+ ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
+ },
+ "_n_updates": 31219,
+ "buffer_size": 1,
+ "batch_size": 128,
+ "learning_starts": 1000,
+ "tau": 0.08,
+ "gamma": 0.98,
+ "gradient_steps": 1,
+ "optimize_memory_usage": false,
+ "replay_buffer_class": {
+ ":type:": "<class 'abc.ABCMeta'>",
+ ":serialized:": "gAWVNQAAAAAAAACMIHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5idWZmZXJzlIwMUmVwbGF5QnVmZmVylJOULg==",
+ "__module__": "stable_baselines3.common.buffers",
+ "__doc__": "\n Replay buffer used in off-policy algorithms like SAC/TD3.\n\n :param buffer_size: Max number of element in the buffer\n :param observation_space: Observation space\n :param action_space: Action space\n :param device:\n :param n_envs: Number of parallel environments\n :param optimize_memory_usage: Enable a memory efficient variant\n of the replay buffer which reduces by almost a factor two the memory used,\n at a cost of more complexity.\n See https://github.com/DLR-RM/stable-baselines3/issues/37#issuecomment-637501195\n and https://github.com/DLR-RM/stable-baselines3/pull/28#issuecomment-637559274\n :param handle_timeout_termination: Handle timeout termination (due to timelimit)\n separately and treat the task as infinite horizon task.\n https://github.com/DLR-RM/stable-baselines3/issues/284\n ",
+ "__init__": "<function ReplayBuffer.__init__ at 0x7f7d928b8040>",
+ "add": "<function ReplayBuffer.add at 0x7f7d928b80d0>",
+ "sample": "<function ReplayBuffer.sample at 0x7f7d928b8160>",
+ "_get_samples": "<function ReplayBuffer._get_samples at 0x7f7d928b81f0>",
+ "__abstractmethods__": "frozenset()",
+ "_abc_impl": "<_abc_data object at 0x7f7d9293a810>"
+ },
+ "replay_buffer_kwargs": {},
+ "train_freq": {
+ ":type:": "<class 'stable_baselines3.common.type_aliases.TrainFreq'>",
+ ":serialized:": "gAWVYQAAAAAAAACMJXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi50eXBlX2FsaWFzZXOUjAlUcmFpbkZyZXGUk5RLIGgAjBJUcmFpbkZyZXF1ZW5jeVVuaXSUk5SMBHN0ZXCUhZRSlIaUgZQu"
+ },
+ "use_sde_at_warmup": false,
+ "target_entropy": -3.0,
+ "ent_coef": "auto",
+ "target_update_interval": 1
+ }
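
The `sac-seals-Hopper-v0/data` entries above are also stored as a JSON member named `data` inside `sac-seals-Hopper-v0.zip`. A minimal sketch of inspecting a few fields without loading the full agent (the archive layout is an assumption based on how stable-baselines3 saves models):

```python
import json
import zipfile

# Read the metadata member of the SB3 archive without deserializing the policy weights.
with zipfile.ZipFile("sac-seals-Hopper-v0.zip") as archive:
    data = json.loads(archive.read("data"))

print(data["num_timesteps"], data["gamma"], data["tau"])
```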
sac-seals-Hopper-v0/ent_coef_optimizer.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:68d0dab5aa5b6a57c5417b8e4d6019d9cc2f09525ac5330a67f0124a2e7e7486
+ size 1191
sac-seals-Hopper-v0/policy.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4f3e1b358f0569d57b83c97fb5ef4f6eaf2a6d516a17cd3f1881a65e6c5a1dba
+ size 1415493
sac-seals-Hopper-v0/pytorch_variables.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7b9473af1cf937c3a8dca9822ebd645dd96d536781e6bd85e62e05903419d35c
+ size 747
sac-seals-Hopper-v0/system_info.txt ADDED
@@ -0,0 +1,7 @@
+ OS: Linux-5.4.0-121-generic-x86_64-with-glibc2.29 #137-Ubuntu SMP Wed Jun 15 13:33:07 UTC 2022
+ Python: 3.8.10
+ Stable-Baselines3: 1.5.1a8
+ PyTorch: 1.11.0+cu102
+ GPU Enabled: False
+ Numpy: 1.22.3
+ Gym: 0.21.0
train_eval_metrics.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:23c3907da51c4be4d60984b5a7ffdf7a7e59da9144bf3d8069de69733e320101
+ size 32803