Image nav task (facebookresearch#333)
Adds a new task: Image Goal Navigation.
thibautlavril authored Apr 21, 2020
1 parent 0e77aa5 commit 4214231
Showing 4 changed files with 165 additions and 1 deletion.
24 changes: 24 additions & 0 deletions configs/tasks/imagenav.yaml
@@ -0,0 +1,24 @@
ENVIRONMENT:
  MAX_EPISODE_STEPS: 500
SIMULATOR:
  AGENT_0:
    SENSORS: ['RGB_SENSOR']
  HABITAT_SIM_V0:
    GPU_DEVICE_ID: 0
  RGB_SENSOR:
    WIDTH: 256
    HEIGHT: 256
  DEPTH_SENSOR:
    WIDTH: 256
    HEIGHT: 256
TASK:
  TYPE: Nav-v0
  SUCCESS_DISTANCE: 0.2

  SENSORS: ['IMAGEGOAL_SENSOR']
  GOAL_SENSOR_UUID: imagegoal

  MEASUREMENTS: ['DISTANCE_TO_GOAL', 'SPL']
  SPL:
    TYPE: SPL
    SUCCESS_DISTANCE: 0.2
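For readers trying the new task, a minimal sketch (not part of this commit) of loading the config above and inspecting the fields it sets; it assumes the standard habitat.get_config helper and the repository-relative config path.

# Minimal sketch, not part of this commit: load the new ImageNav task config.
# Assumes habitat-api is installed and run from the repository root.
import habitat

config = habitat.get_config("configs/tasks/imagenav.yaml")
print(config.TASK.SENSORS)           # ['IMAGEGOAL_SENSOR']
print(config.TASK.GOAL_SENSOR_UUID)  # imagegoal
print(config.TASK.MEASUREMENTS)      # ['DISTANCE_TO_GOAL', 'SPL']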
5 changes: 5 additions & 0 deletions habitat/config/default.py
@@ -101,6 +101,11 @@ def __init__(self, *args, **kwargs):
_C.TASK.OBJECTGOAL_SENSOR.GOAL_SPEC = "TASK_CATEGORY_ID"
_C.TASK.OBJECTGOAL_SENSOR.GOAL_SPEC_MAX_VAL = 50
# -----------------------------------------------------------------------------
# IMAGEGOAL SENSOR
# -----------------------------------------------------------------------------
_C.TASK.IMAGEGOAL_SENSOR = CN()
_C.TASK.IMAGEGOAL_SENSOR.TYPE = "ImageGoalSensor"
# -----------------------------------------------------------------------------
# HEADING SENSOR
# -----------------------------------------------------------------------------
_C.TASK.HEADING_SENSOR = CN()
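As a hedged illustration (not part of this diff) of how the TYPE string added above is consumed: when the task builds its sensor suite, the name is looked up in Habitat's registry, which the decorator in nav.py below populates. This assumes the registry.get_sensor accessor from habitat.core.registry.

# Illustration only: resolve the sensor class registered under the TYPE name.
# Assumes registry.get_sensor() from habitat.core.registry.
from habitat.core.registry import registry

sensor_cls = registry.get_sensor("ImageGoalSensor")
assert sensor_cls is not None, "sensor was not registered"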
73 changes: 73 additions & 0 deletions habitat/tasks/nav/nav.py
@@ -21,6 +21,7 @@
from habitat.core.registry import registry
from habitat.core.simulator import (
    AgentState,
    RGBSensor,
    Sensor,
    SensorTypes,
    ShortestPathPoint,
@@ -202,6 +203,78 @@ def get_observation(
)


@registry.register_sensor
class ImageGoalSensor(Sensor):
    r"""Sensor for ImageGoal observations which are used in ImageGoal Navigation.
    RGBSensor needs to be one of the Simulator sensors.
    This sensor returns the RGB image taken from the goal position to reach,
    with a random rotation.
    Args:
        sim: reference to the simulator for calculating task observations.
        config: config for the ImageGoal sensor.
    """

    def __init__(
        self, *args: Any, sim: Simulator, config: Config, **kwargs: Any
    ):
        self._sim = sim
        sensors = self._sim.sensor_suite.sensors
        rgb_sensor_uuids = [
            uuid
            for uuid, sensor in sensors.items()
            if isinstance(sensor, RGBSensor)
        ]
        if len(rgb_sensor_uuids) != 1:
            raise ValueError(
                f"ImageGoalNav requires one RGB sensor, {len(rgb_sensor_uuids)} detected"
            )

        (self._rgb_sensor_uuid,) = rgb_sensor_uuids
        self._current_episode_id = None
        self._current_image_goal = None
        super().__init__(config=config)

    def _get_uuid(self, *args: Any, **kwargs: Any):
        return "imagegoal"

    def _get_sensor_type(self, *args: Any, **kwargs: Any):
        return SensorTypes.PATH

    def _get_observation_space(self, *args: Any, **kwargs: Any):
        return self._sim.sensor_suite.observation_spaces.spaces[
            self._rgb_sensor_uuid
        ]

    def _get_pointnav_episode_image_goal(self, episode: Episode):
        goal_position = np.array(episode.goals[0].position, dtype=np.float32)
        # Seed from the episode_id so the sampled rotation is the same every
        # time the same episode is seen, since the task currently reuses the
        # PointNav dataset.
        seed = abs(hash(episode.episode_id)) % (2 ** 32)
        rng = np.random.RandomState(seed)
        angle = rng.uniform(0, 2 * np.pi)
        source_rotation = [0, np.sin(angle / 2), 0, np.cos(angle / 2)]
        goal_observation = self._sim.get_observations_at(
            position=goal_position.tolist(), rotation=source_rotation
        )
        return goal_observation[self._rgb_sensor_uuid]

    def get_observation(
        self, *args: Any, observations, episode: Episode, **kwargs: Any
    ):
        episode_uniq_id = episode.scene_id + episode.episode_id
        if episode_uniq_id == self._current_episode_id:
            return self._current_image_goal

        self._current_image_goal = self._get_pointnav_episode_image_goal(
            episode
        )
        self._current_episode_id = episode_uniq_id

        return self._current_image_goal


@registry.register_sensor(name="PointGoalWithGPSCompassSensor")
class IntegratedPointGoalGPSAndCompassSensor(PointGoalSensor):
r"""Sensor that integrates PointGoals observations (which are used PointGoal Navigation) and GPS+Compass.
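For clarity, a small worked sketch (illustration only, not part of the commit) of the goal-view rotation used by _get_pointnav_episode_image_goal: the quaternion [0, sin(angle/2), 0, cos(angle/2)] is a pure yaw of angle radians about the world up (y) axis, and seeding the RNG with a hash of the episode id makes the sampled view stable every time the same episode is replayed within a run.

# Illustration only: reproduce the deterministic goal rotation per episode id.
import numpy as np

episode_id = "7"  # hypothetical id; the sensor uses episode.episode_id
seed = abs(hash(episode_id)) % (2 ** 32)
angle = np.random.RandomState(seed).uniform(0, 2 * np.pi)
# Quaternion in [x, y, z, w] order: rotation by `angle` about the y (up) axis.
rotation = [0, np.sin(angle / 2), 0, np.cos(angle / 2)]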
64 changes: 63 additions & 1 deletion test/test_sensors.py
@@ -74,7 +74,7 @@ def test_state_sensors():
            NavigationEpisode(
                episode_id="0",
                scene_id=config.SIMULATOR.SCENE,
-               start_position=[03.00611, 0.072447, -2.67867],
+               start_position=[03.00611, 0.072_447, -2.67867],
                start_rotation=random_rotation,
                goals=[],
            )
@@ -263,6 +263,68 @@ def test_pointgoal_with_gps_compass_sensor():
    env.close()


def test_imagegoal_sensor():
    config = get_config()
    if not os.path.exists(config.SIMULATOR.SCENE):
        pytest.skip("Please download Habitat test data to data folder.")
    config.defrost()
    config.TASK.SENSORS = ["IMAGEGOAL_SENSOR"]
    config.SIMULATOR.AGENT_0.SENSORS = ["RGB_SENSOR"]
    config.freeze()
    env = habitat.Env(config=config, dataset=None)

    # start position is checked for validity for the specific test scene
    valid_start_position = [-1.3731, 0.08431, 8.60692]
    pointgoal = [0.1, 0.2, 0.3]
    goal_position = np.add(valid_start_position, pointgoal)

    pointgoal_2 = [0.3, 0.2, 0.1]
    goal_position_2 = np.add(valid_start_position, pointgoal_2)

    # starting quaternion is rotated 180 degrees about the z-axis, which
    # corresponds to the simulator using z-negative as the forward direction
    start_rotation = [0, 0, 0, 1]

    env.episode_iterator = iter(
        [
            NavigationEpisode(
                episode_id="0",
                scene_id=config.SIMULATOR.SCENE,
                start_position=valid_start_position,
                start_rotation=start_rotation,
                goals=[NavigationGoal(position=goal_position)],
            ),
            NavigationEpisode(
                episode_id="1",
                scene_id=config.SIMULATOR.SCENE,
                start_position=valid_start_position,
                start_rotation=start_rotation,
                goals=[NavigationGoal(position=goal_position_2)],
            ),
        ]
    )
    obs = env.reset()
    for _ in range(100):
        new_obs = env.step(sample_non_stop_action(env.action_space))
        # check that taking non-stop actions does not affect the static image_goal
        assert np.allclose(obs["imagegoal"], new_obs["imagegoal"])
        assert np.allclose(obs["rgb"].shape, new_obs["imagegoal"].shape)

    previous_episode_obs = obs
    _ = env.reset()
    for _ in range(10):
        new_obs = env.step(sample_non_stop_action(env.action_space))
        # check that the new episode gets a different image_goal of the same shape
        assert not np.allclose(
            previous_episode_obs["imagegoal"], new_obs["imagegoal"]
        )
        assert np.allclose(
            previous_episode_obs["rgb"].shape, new_obs["imagegoal"].shape
        )

    env.close()


def test_get_observations_at():
    config = get_config()
    if not os.path.exists(config.SIMULATOR.SCENE):
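To run just the new test locally, a suggested invocation (assuming pytest is installed and the Habitat test scene data are in place, as the skip guard above expects):

# Suggested invocation only; equivalent to the pytest CLI with a -k filter.
import pytest

pytest.main(["test/test_sensors.py", "-k", "test_imagegoal_sensor"])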
