Update gym version.
PiperOrigin-RevId: 253615467
Change-Id: I142eaedd5c73627900fc2bc5918716ba15274a4f
Oscar Ramirez authored and copybara-github committed Jun 17, 2019
1 parent 4f7ccc4 commit 96e7b31
Showing 4 changed files with 8 additions and 8 deletions.
4 changes: 2 additions & 2 deletions setup.py
@@ -136,9 +136,9 @@ def main(_):

 TEST_REQUIRED_PACKAGES = [
     'atari_py == 0.1.7',
-    'gym == 0.10.11',
+    'gym == 0.12.5',
     'opencv-python >= 3.4.1.15',
-    'pybullet == 2.4.2',
+    'pybullet',
     'scipy == 1.1.0',
 ]
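
For orientation, here is a minimal, hypothetical sketch of how a pinned test-dependency list like the one above is typically wired into setuptools. The package name and everything else besides the dependency list are invented for illustration and are not copied from the actual setup.py.

# Minimal sketch only; the real setup.py defines many more arguments.
from setuptools import find_packages, setup

TEST_REQUIRED_PACKAGES = [
    'atari_py == 0.1.7',
    'gym == 0.12.5',
    'opencv-python >= 3.4.1.15',
    'pybullet',
    'scipy == 1.1.0',
]

setup(
    name='example-package',  # Illustrative name, not the real package name.
    packages=find_packages(),
    tests_require=TEST_REQUIRED_PACKAGES,
)
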
4 changes: 2 additions & 2 deletions tf_agents/environments/atari_wrappers.py
@@ -47,13 +47,13 @@ def __getattr__(self, name):
   def _generate_observation(self):
     return np.concatenate(self._frames, axis=2)

-  def _reset(self):
+  def reset(self):
     observation = self._env.reset()
     for _ in range(FrameStack4.STACK_SIZE):
       self._frames.append(observation)
     return self._generate_observation()

-  def _step(self, action):
+  def step(self, action):
     observation, reward, done, info = self._env.step(action)
     self._frames.append(observation)
     return self._generate_observation(), reward, done, info
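
The rename from _reset/_step to reset/step tracks newer gym releases, which call a wrapper's public reset() and step() methods directly rather than dispatching through the old underscore-prefixed hooks. Below is a self-contained sketch of the same four-frame-stacking idea written against the gym 0.12 wrapper API; the class name and details are illustrative, not the actual TF-Agents implementation.

import collections

import gym
import numpy as np


class FrameStackSketch(gym.Wrapper):
  """Stacks the last 4 observations along the channel axis (illustrative)."""

  STACK_SIZE = 4

  def __init__(self, env):
    super(FrameStackSketch, self).__init__(env)
    self._frames = collections.deque(maxlen=self.STACK_SIZE)

  def _generate_observation(self):
    # Concatenate e.g. four (84, 84, 1) frames into one (84, 84, 4) array.
    return np.concatenate(list(self._frames), axis=2)

  def reset(self):  # Public method; newer gym no longer calls _reset().
    observation = self.env.reset()
    for _ in range(self.STACK_SIZE):
      self._frames.append(observation)
    return self._generate_observation()

  def step(self, action):  # Public method; newer gym no longer calls _step().
    observation, reward, done, info = self.env.step(action)
    self._frames.append(observation)
    return self._generate_observation(), reward, done, info
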
2 changes: 1 addition & 1 deletion tf_agents/environments/suite_atari.py
@@ -73,7 +73,7 @@ def load(environment_name,
   gym_spec = gym.spec(environment_name)
   gym_env = gym_spec.make()

-  if max_episode_steps is None and gym_spec.timestep_limit is not None:
+  if max_episode_steps is None and gym_spec.max_episode_steps is not None:
     max_episode_steps = gym_spec.max_episode_steps

   return suite_gym.wrap_env(
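
Both suite files adapt to the same registry change: gym's EnvSpec no longer exposes the default step limit as timestep_limit, it is now max_episode_steps (hence the AttributeError mentioned in the comments below). A small illustrative check, using CartPole-v0 only as an example environment id:

import gym

spec = gym.spec('CartPole-v0')
# Older gym: spec.timestep_limit; gym 0.12.x: spec.max_episode_steps.
max_episode_steps = getattr(spec, 'max_episode_steps', None)
if max_episode_steps is not None:
  print('Default episode step limit:', max_episode_steps)
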
6 changes: 3 additions & 3 deletions tf_agents/environments/suite_gym.py
@@ -46,7 +46,7 @@ def load(environment_name,
     discount: Discount to use for the environment.
     max_episode_steps: If None the max_episode_steps will be set to the default
       step limit defined in the environment's spec. No limit is applied if set
-      to 0 or if there is no timestep_limit set in the environment's spec.
+      to 0 or if there is no max_episode_steps set in the environment's spec.
     gym_env_wrappers: Iterable with references to wrapper classes to use
       directly on the gym environment.
     env_wrappers: Iterable with references to wrapper classes to use on the

@@ -63,7 +63,7 @@ def load(environment_name,
   gym_spec = gym.spec(environment_name)
   gym_env = gym_spec.make()

-  if max_episode_steps is None and gym_spec.timestep_limit is not None:
+  if max_episode_steps is None and gym_spec.max_episode_steps is not None:
     max_episode_steps = gym_spec.max_episode_steps

   return wrap_env(

@@ -93,7 +93,7 @@ def wrap_env(gym_env,
     gym_env: An instance of OpenAI gym environment.
     discount: Discount to use for the environment.
     max_episode_steps: Used to create a TimeLimitWrapper. No limit is applied
-      if set to 0. Usually set to `gym_spec.timestep_limit` as done in `load.
+      if set to 0. Usually set to `gym_spec.max_episode_steps` as done in `load.
     gym_env_wrappers: Iterable with references to wrapper classes to use
       directly on the gym environment.
     time_limit_wrapper: Wrapper that accepts (env, max_episode_steps) params to
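
For reference, a hedged usage sketch of the load path touched above: with max_episode_steps=None the limit falls back to the environment spec's max_episode_steps, 0 disables the time limit entirely, and any other value is applied via the TimeLimit wrapper. The environment id, limit, and action below are illustrative only.

from tf_agents.environments import suite_gym

env = suite_gym.load('CartPole-v0', max_episode_steps=200)
time_step = env.reset()
while not time_step.is_last():
  time_step = env.step(1)  # Arbitrary fixed action, purely for illustration.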

2 comments on commit 96e7b31

@8bitmp3

Thanks @oars, this fixed the AttributeError: 'EnvSpec' object has no attribute 'timestep_limit' issue.

@oars
Contributor

@oars commented on 96e7b31 Jun 18, 2019


Thanks! Closed the relevant issue.
