diff --git a/crates/re_renderer/examples/2d.rs b/crates/re_renderer/examples/2d.rs
index cb4c25035645..a9ea6e7e5638 100644
--- a/crates/re_renderer/examples/2d.rs
+++ b/crates/re_renderer/examples/2d.rs
@@ -149,7 +149,7 @@ impl framework::Example for Render2D {
         // Moving the windows to a high dpi screen makes the second one bigger.
         // Also, it looks different under perspective projection.
         // The third point is automatic thickness which is determined by the point renderer implementation.
-        let mut point_cloud_builder = PointCloudBuilder::<()>::new(re_ctx);
+        let mut point_cloud_builder = PointCloudBuilder::new(re_ctx);
         point_cloud_builder
             .batch("points")
             .add_points_2d(
diff --git a/crates/re_renderer/examples/depth_cloud.rs b/crates/re_renderer/examples/depth_cloud.rs
index fb6a18821ace..ce80f17594c5 100644
--- a/crates/re_renderer/examples/depth_cloud.rs
+++ b/crates/re_renderer/examples/depth_cloud.rs
@@ -98,7 +98,7 @@ impl RenderDepthClouds {
             })
             .multiunzip();
 
-        let mut builder = PointCloudBuilder::<()>::new(re_ctx);
+        let mut builder = PointCloudBuilder::new(re_ctx);
         builder
             .batch("backprojected point cloud")
             .add_points(num_points as _, points.into_iter())
diff --git a/crates/re_renderer/examples/framework.rs b/crates/re_renderer/examples/framework.rs
index 65d16fa3cf30..47e0b42a3212 100644
--- a/crates/re_renderer/examples/framework.rs
+++ b/crates/re_renderer/examples/framework.rs
@@ -210,10 +210,10 @@ impl Application {
                 Event::WindowEvent {
                     event: WindowEvent::CursorMoved { position, .. },
                     ..
-                } => self.example.on_cursor_moved(glam::uvec2(
-                    position.x.round() as u32,
-                    position.y.round() as u32,
-                )),
+                } => self
+                    .example
+                    // Don't round the position: the entire range from 0 up to (but excluding) 1 should fall into pixel coordinate 0!
+                    .on_cursor_moved(glam::uvec2(position.x as u32, position.y as u32)),
                 Event::WindowEvent {
                     event:
                         WindowEvent::ScaleFactorChanged {
diff --git a/crates/re_renderer/examples/multiview.rs b/crates/re_renderer/examples/multiview.rs
index 24c76d4c9d5f..35cb61cd2369 100644
--- a/crates/re_renderer/examples/multiview.rs
+++ b/crates/re_renderer/examples/multiview.rs
@@ -316,7 +316,7 @@ impl Example for Multiview {
         let skybox = GenericSkyboxDrawData::new(re_ctx);
         let lines = build_lines(re_ctx, seconds_since_startup);
 
-        let mut builder = PointCloudBuilder::<()>::new(re_ctx);
+        let mut builder = PointCloudBuilder::new(re_ctx);
         builder
             .batch("Random Points")
             .world_from_obj(glam::Mat4::from_rotation_x(seconds_since_startup))
diff --git a/crates/re_renderer/examples/picking.rs b/crates/re_renderer/examples/picking.rs
index 5344e044d26d..f304b241ed1a 100644
--- a/crates/re_renderer/examples/picking.rs
+++ b/crates/re_renderer/examples/picking.rs
@@ -157,7 +157,7 @@ impl framework::Example for Picking {
             .schedule_picking_rect(re_ctx, picking_rect, READBACK_IDENTIFIER, (), false)
             .unwrap();
 
-        let mut point_builder = PointCloudBuilder::<()>::new(re_ctx);
+        let mut point_builder = PointCloudBuilder::new(re_ctx);
         for (i, point_set) in self.point_sets.iter().enumerate() {
             point_builder
                 .batch(format!("Random Points {i}"))
diff --git a/crates/re_renderer/src/draw_phases/picking_layer.rs b/crates/re_renderer/src/draw_phases/picking_layer.rs
index 69b125529b2d..dc5cf38f033f 100644
--- a/crates/re_renderer/src/draw_phases/picking_layer.rs
+++ b/crates/re_renderer/src/draw_phases/picking_layer.rs
@@ -224,7 +224,7 @@ impl PickingLayerProcessor {
             DepthReadbackWorkaround::new(ctx, picking_rect.extent, picking_depth_target.handle)
         });
 
-        let rect_min =
picking_rect.top_left_corner.as_vec2(); + let rect_min = picking_rect.left_top.as_vec2(); let rect_max = rect_min + picking_rect.extent.as_vec2(); let screen_resolution = screen_resolution.as_vec2(); // y axis is flipped in NDC, therefore we need to flip the y axis of the rect. @@ -232,10 +232,10 @@ impl PickingLayerProcessor { pixel_coord_to_ndc(glam::vec2(rect_min.x, rect_max.y), screen_resolution); let rect_max_ndc = pixel_coord_to_ndc(glam::vec2(rect_max.x, rect_min.y), screen_resolution); - let rect_center_ndc = (rect_min_ndc + rect_max_ndc) * 0.5; - let cropped_projection_from_projection = - glam::Mat4::from_scale(2.0 / (rect_max_ndc - rect_min_ndc).extend(1.0)) - * glam::Mat4::from_translation(-rect_center_ndc.extend(0.0)); + let scale = 2.0 / (rect_max_ndc - rect_min_ndc); + let translation = -0.5 * (rect_min_ndc + rect_max_ndc); + let cropped_projection_from_projection = glam::Mat4::from_scale(scale.extend(1.0)) + * glam::Mat4::from_translation(translation.extend(0.0)); // Setup frame uniform buffer let previous_projection_from_world: glam::Mat4 = diff --git a/crates/re_renderer/src/point_cloud_builder.rs b/crates/re_renderer/src/point_cloud_builder.rs index 84ef0e0187b9..3596f6f3d72d 100644 --- a/crates/re_renderer/src/point_cloud_builder.rs +++ b/crates/re_renderer/src/point_cloud_builder.rs @@ -9,23 +9,19 @@ use crate::{ }; /// Builder for point clouds, making it easy to create [`crate::renderer::PointCloudDrawData`]. -pub struct PointCloudBuilder { - // Size of `point`/color`/`per_point_user_data` must be equal. +pub struct PointCloudBuilder { + // Size of `point`/color` must be equal. pub vertices: Vec, pub(crate) color_buffer: CpuWriteGpuReadBuffer, pub(crate) picking_instance_ids_buffer: CpuWriteGpuReadBuffer, - pub user_data: Vec, pub(crate) batches: Vec, pub(crate) radius_boost_in_ui_points_for_outlines: f32, } -impl PointCloudBuilder -where - PerPointUserData: Default + Copy, -{ +impl PointCloudBuilder { pub fn new(ctx: &RenderContext) -> Self { const RESERVE_SIZE: usize = 512; @@ -48,7 +44,6 @@ where vertices: Vec::with_capacity(RESERVE_SIZE), color_buffer, picking_instance_ids_buffer, - user_data: Vec::with_capacity(RESERVE_SIZE), batches: Vec::with_capacity(16), radius_boost_in_ui_points_for_outlines: 0.0, } @@ -65,10 +60,7 @@ where /// Start of a new batch. #[inline] - pub fn batch( - &mut self, - label: impl Into, - ) -> PointCloudBatchBuilder<'_, PerPointUserData> { + pub fn batch(&mut self, label: impl Into) -> PointCloudBatchBuilder<'_> { self.batches.push(PointCloudBatchInfo { label: label.into(), world_from_obj: glam::Mat4::IDENTITY, @@ -105,30 +97,6 @@ where }) } - // Iterate over all batches, yielding the batch info and a point vertex iterator zipped with its user data. - pub fn iter_vertices_and_userdata_by_batch( - &self, - ) -> impl Iterator< - Item = ( - &PointCloudBatchInfo, - impl Iterator, - ), - > { - let mut vertex_offset = 0; - self.batches.iter().map(move |batch| { - let out = ( - batch, - self.vertices - .iter() - .zip(self.user_data.iter()) - .skip(vertex_offset) - .take(batch.point_count as usize), - ); - vertex_offset += batch.point_count as usize; - out - }) - } - /// Finalizes the builder and returns a point cloud draw data with all the points added so far. 
pub fn to_draw_data( self, @@ -138,16 +106,9 @@ where } } -pub struct PointCloudBatchBuilder<'a, PerPointUserData>( - &'a mut PointCloudBuilder, -) -where - PerPointUserData: Default + Copy; +pub struct PointCloudBatchBuilder<'a>(&'a mut PointCloudBuilder); -impl<'a, PerPointUserData> Drop for PointCloudBatchBuilder<'a, PerPointUserData> -where - PerPointUserData: Default + Copy, -{ +impl<'a> Drop for PointCloudBatchBuilder<'a> { fn drop(&mut self) { // Remove batch again if it wasn't actually used. if self.0.batches.last().unwrap().point_count == 0 { @@ -157,10 +118,7 @@ where } } -impl<'a, PerPointUserData> PointCloudBatchBuilder<'a, PerPointUserData> -where - PerPointUserData: Default + Copy, -{ +impl<'a> PointCloudBatchBuilder<'a> { #[inline] fn batch_mut(&mut self) -> &mut PointCloudBatchInfo { self.0 @@ -200,13 +158,6 @@ where self.0.vertices.len() - self.0.picking_instance_ids_buffer.num_written(), )); } - - if self.0.user_data.len() < self.0.vertices.len() { - self.0.user_data.extend( - std::iter::repeat(PerPointUserData::default()) - .take(self.0.vertices.len() - self.0.user_data.len()), - ); - } } #[inline] @@ -222,7 +173,7 @@ where &mut self, size_hint: usize, positions: impl Iterator, - ) -> PointsBuilder<'_, PerPointUserData> { + ) -> PointsBuilder<'_> { // TODO(jleibs): Figure out if we can plumb-through proper support for `Iterator::size_hints()` // or potentially make `FixedSizedIterator` work correctly. This should be possible size the // underlying arrow structures are of known-size, but carries some complexity with the amount of @@ -232,7 +183,6 @@ where self.extend_defaults(); debug_assert_eq!(self.0.vertices.len(), self.0.color_buffer.num_written()); - debug_assert_eq!(self.0.vertices.len(), self.0.user_data.len()); let old_size = self.0.vertices.len(); @@ -245,8 +195,6 @@ where let num_points = self.0.vertices.len() - old_size; self.batch_mut().point_count += num_points as u32; - self.0.user_data.reserve(num_points); - let new_range = old_size..self.0.vertices.len(); let max_points = self.0.vertices.len(); @@ -256,7 +204,6 @@ where max_points, colors: &mut self.0.color_buffer, picking_instance_ids: &mut self.0.picking_instance_ids_buffer, - user_data: &mut self.0.user_data, additional_outline_mask_ids: &mut self .0 .batches @@ -268,24 +215,22 @@ where } #[inline] - pub fn add_point(&mut self, position: glam::Vec3) -> PointBuilder<'_, PerPointUserData> { + pub fn add_point(&mut self, position: glam::Vec3) -> PointBuilder<'_> { self.extend_defaults(); debug_assert_eq!(self.0.vertices.len(), self.0.color_buffer.num_written()); - debug_assert_eq!(self.0.vertices.len(), self.0.user_data.len()); let vertex_index = self.0.vertices.len() as u32; self.0.vertices.push(PointCloudVertex { position, radius: Size::AUTO, }); - self.0.user_data.push(Default::default()); self.batch_mut().point_count += 1; PointBuilder { vertex: self.0.vertices.last_mut().unwrap(), color: &mut self.0.color_buffer, - user_data: self.0.user_data.last_mut().unwrap(), + picking_instance_id: &mut self.0.picking_instance_ids_buffer, vertex_index, additional_outline_mask_ids: &mut self .0 @@ -308,13 +253,13 @@ where &mut self, size_hint: usize, positions: impl Iterator, - ) -> PointsBuilder<'_, PerPointUserData> { + ) -> PointsBuilder<'_> { self.add_points(size_hint, positions.map(|p| p.extend(0.0))) } /// Adds a single 2D point. Uses an autogenerated depth value. 
#[inline] - pub fn add_point_2d(&mut self, position: glam::Vec2) -> PointBuilder<'_, PerPointUserData> { + pub fn add_point_2d(&mut self, position: glam::Vec2) -> PointBuilder<'_> { self.add_point(position.extend(0.0)) } @@ -331,19 +276,17 @@ where } // TODO(andreas): Should remove single-point builder, practically this never makes sense as we're almost always dealing with arrays of points. -pub struct PointBuilder<'a, PerPointUserData> { +pub struct PointBuilder<'a> { vertex: &'a mut PointCloudVertex, color: &'a mut CpuWriteGpuReadBuffer, - user_data: &'a mut PerPointUserData, + picking_instance_id: &'a mut CpuWriteGpuReadBuffer, vertex_index: u32, + additional_outline_mask_ids: &'a mut Vec<(std::ops::Range, OutlineMaskPreference)>, outline_mask_id: OutlineMaskPreference, } -impl<'a, PerPointUserData> PointBuilder<'a, PerPointUserData> -where - PerPointUserData: Clone, -{ +impl<'a> PointBuilder<'a> { #[inline] pub fn radius(self, radius: Size) -> Self { self.vertex.radius = radius; @@ -357,21 +300,24 @@ where self } - pub fn user_data(self, data: PerPointUserData) -> Self { - *self.user_data = data; - self - } - /// Pushes additional outline mask ids for this point /// /// Prefer the `overall_outline_mask_ids` setting to set the outline mask ids for the entire batch whenever possible! + #[inline] pub fn outline_mask_id(mut self, outline_mask_id: OutlineMaskPreference) -> Self { self.outline_mask_id = outline_mask_id; self } + + /// This mustn't call this more than once. + #[inline] + pub fn picking_instance_id(self, picking_instance_id: PickingLayerInstanceId) -> Self { + self.picking_instance_id.push(picking_instance_id); + self + } } -impl<'a, PerPointUserData> Drop for PointBuilder<'a, PerPointUserData> { +impl<'a> Drop for PointBuilder<'a> { fn drop(&mut self) { if self.outline_mask_id.is_some() { self.additional_outline_mask_ids.push(( @@ -382,21 +328,17 @@ impl<'a, PerPointUserData> Drop for PointBuilder<'a, PerPointUserData> { } } -pub struct PointsBuilder<'a, PerPointUserData> { +pub struct PointsBuilder<'a> { // Vertices is a slice, which radii will update vertices: &'a mut [PointCloudVertex], max_points: usize, colors: &'a mut CpuWriteGpuReadBuffer, picking_instance_ids: &'a mut CpuWriteGpuReadBuffer, - user_data: &'a mut Vec, additional_outline_mask_ids: &'a mut Vec<(std::ops::Range, OutlineMaskPreference)>, start_vertex_index: u32, } -impl<'a, PerPointUserData> PointsBuilder<'a, PerPointUserData> -where - PerPointUserData: Clone, -{ +impl<'a> PointsBuilder<'a> { /// Assigns radii to all points. /// /// This mustn't call this more than once. @@ -440,19 +382,6 @@ where self } - /// Assigns user data for all points in this builder. - /// - /// This mustn't call this more than once. - /// - /// User data is currently not available on the GPU. - #[inline] - pub fn user_data(self, data: impl Iterator) -> Self { - crate::profile_function!(); - self.user_data - .extend(data.take(self.max_points - self.user_data.len())); - self - } - /// Pushes additional outline mask ids for a specific range of points. /// The range is relative to this builder's range, not the entire batch. /// diff --git a/crates/re_renderer/src/rect.rs b/crates/re_renderer/src/rect.rs index 60c48ea82ae4..8b70e81ac357 100644 --- a/crates/re_renderer/src/rect.rs +++ b/crates/re_renderer/src/rect.rs @@ -4,7 +4,7 @@ #[derive(Clone, Copy, Debug)] pub struct IntRect { /// The top left corner of the rectangle. - pub top_left_corner: glam::IVec2, + pub left_top: glam::IVec2, /// The size of the rectangle. 
pub extent: glam::UVec2, @@ -14,23 +14,23 @@ impl IntRect { #[inline] pub fn from_middle_and_extent(middle: glam::IVec2, size: glam::UVec2) -> Self { Self { - top_left_corner: middle - size.as_ivec2() / 2, + left_top: middle - size.as_ivec2() / 2, extent: size, } } #[inline] - pub fn width(&self) -> u32 { + pub fn width(self) -> u32 { self.extent.x } #[inline] - pub fn height(&self) -> u32 { - self.extent.x + pub fn height(self) -> u32 { + self.extent.y } #[inline] - pub fn wgpu_extent(&self) -> wgpu::Extent3d { + pub fn wgpu_extent(self) -> wgpu::Extent3d { wgpu::Extent3d { width: self.extent.x, height: self.extent.y, diff --git a/crates/re_renderer/src/renderer/debug_overlay.rs b/crates/re_renderer/src/renderer/debug_overlay.rs index 276f8fc413da..6e615cd4a710 100644 --- a/crates/re_renderer/src/renderer/debug_overlay.rs +++ b/crates/re_renderer/src/renderer/debug_overlay.rs @@ -93,7 +93,7 @@ impl DebugOverlayDrawData { "DebugOverlayDrawData".into(), gpu_data::DebugOverlayUniformBuffer { screen_resolution: screen_resolution.as_vec2().into(), - position_in_pixel: overlay_rect.top_left_corner.as_vec2().into(), + position_in_pixel: overlay_rect.left_top.as_vec2().into(), extent_in_pixel: overlay_rect.extent.as_vec2().into(), mode: mode as u32, _padding: 0, diff --git a/crates/re_renderer/src/renderer/point_cloud.rs b/crates/re_renderer/src/renderer/point_cloud.rs index 1e9bfb77a578..15059d82fd04 100644 --- a/crates/re_renderer/src/renderer/point_cloud.rs +++ b/crates/re_renderer/src/renderer/point_cloud.rs @@ -173,9 +173,9 @@ impl PointCloudDrawData { /// Number of vertices and colors has to be equal. /// /// If no batches are passed, all points are assumed to be in a single batch with identity transform. - pub fn new( + pub fn new( ctx: &mut RenderContext, - mut builder: PointCloudBuilder, + mut builder: PointCloudBuilder, ) -> Result { crate::profile_function!(); diff --git a/crates/re_viewer/src/ui/view_spatial/eye.rs b/crates/re_viewer/src/ui/view_spatial/eye.rs index 101da1ca6d7c..287481311101 100644 --- a/crates/re_viewer/src/ui/view_spatial/eye.rs +++ b/crates/re_viewer/src/ui/view_spatial/eye.rs @@ -48,24 +48,24 @@ impl Eye { } } - pub fn ui_from_world(&self, rect: &Rect) -> Mat4 { - let aspect_ratio = rect.width() / rect.height(); + pub fn ui_from_world(&self, space2d_rect: Rect) -> Mat4 { + let aspect_ratio = space2d_rect.width() / space2d_rect.height(); let projection = if let Some(fov_y) = self.fov_y { Mat4::perspective_infinite_rh(fov_y, aspect_ratio, self.near()) } else { Mat4::orthographic_rh( - rect.left(), - rect.right(), - rect.bottom(), - rect.top(), + space2d_rect.left(), + space2d_rect.right(), + space2d_rect.bottom(), + space2d_rect.top(), self.near(), self.far(), ) }; - Mat4::from_translation(vec3(rect.center().x, rect.center().y, 0.0)) - * Mat4::from_scale(0.5 * vec3(rect.width(), -rect.height(), 1.0)) + Mat4::from_translation(vec3(space2d_rect.center().x, space2d_rect.center().y, 0.0)) + * Mat4::from_scale(0.5 * vec3(space2d_rect.width(), -space2d_rect.height(), 1.0)) * projection * self.world_from_view.inverse() } @@ -80,7 +80,7 @@ impl Eye { /// Picking ray for a given pointer in the parent space /// (i.e. 
prior to camera transform, "world" space) - pub fn picking_ray(&self, screen_rect: &Rect, pointer: glam::Vec2) -> macaw::Ray3 { + pub fn picking_ray(&self, screen_rect: Rect, pointer: glam::Vec2) -> macaw::Ray3 { if let Some(fov_y) = self.fov_y { let (w, h) = (screen_rect.width(), screen_rect.height()); let aspect_ratio = w / h; diff --git a/crates/re_viewer/src/ui/view_spatial/scene/mod.rs b/crates/re_viewer/src/ui/view_spatial/scene/mod.rs index 7f7c9cb90a7c..93dbc5845c2e 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/mod.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/mod.rs @@ -8,7 +8,7 @@ use re_log_types::{ }; use re_renderer::{Color32, OutlineMaskPreference, Size}; -use super::{eye::Eye, SpaceCamera3D, SpatialNavigationMode}; +use super::{SpaceCamera3D, SpatialNavigationMode}; use crate::{ misc::{mesh_loader::LoadedMesh, SpaceViewHighlights, TransformCache, ViewerContext}, ui::{ @@ -21,7 +21,7 @@ mod picking; mod primitives; mod scene_part; -pub use self::picking::{AdditionalPickingInfo, PickingRayHit, PickingResult}; +pub use self::picking::{AdditionalPickingInfo, PickingContext, PickingRayHit, PickingResult}; pub use self::primitives::SceneSpatialPrimitives; use scene_part::ScenePart; @@ -246,28 +246,4 @@ impl SceneSpatial { SpatialNavigationMode::ThreeD } - - #[allow(clippy::too_many_arguments)] - pub fn picking( - &self, - render_ctx: &re_renderer::RenderContext, - gpu_readback_identifier: re_renderer::GpuReadbackIdentifier, - previous_picking_result: &Option, - pointer_in_ui: glam::Vec2, - ui_rect: &egui::Rect, - eye: &Eye, - ui_interaction_radius: f32, - ) -> PickingResult { - picking::picking( - render_ctx, - gpu_readback_identifier, - previous_picking_result, - pointer_in_ui, - ui_rect, - eye, - &self.primitives, - &self.ui, - ui_interaction_radius, - ) - } } diff --git a/crates/re_viewer/src/ui/view_spatial/scene/picking.rs b/crates/re_viewer/src/ui/view_spatial/scene/picking.rs index ea813f460c91..d74d80ef1ee1 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/picking.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/picking.rs @@ -1,3 +1,5 @@ +//! Handles picking in 2D & 3D spaces. + use itertools::Itertools as _; use re_data_store::InstancePathHash; @@ -18,6 +20,9 @@ pub enum AdditionalPickingInfo { /// The hit was a textured rect at the given uv coordinates (ranging from 0 to 1) TexturedRect(glam::Vec2), + /// The result came from GPU based picking. + GpuPickingResult, + /// We hit a egui ui element, meaning that depth information is not usable. GuiOverlay, } @@ -36,9 +41,6 @@ pub struct PickingRayHit { /// Any additional information about the picking hit. pub info: AdditionalPickingInfo, - - /// True if this picking result came from a GPU picking pass. - pub used_gpu_picking: bool, } impl PickingRayHit { @@ -48,9 +50,12 @@ impl PickingRayHit { ray_t: t, info: AdditionalPickingInfo::None, depth_offset: 0, - used_gpu_picking: false, } } + + pub fn space_position(&self, ray_in_world: &macaw::Ray3) -> glam::Vec3 { + ray_in_world.origin + ray_in_world.dir * self.ray_t + } } #[derive(Clone)] @@ -61,33 +66,25 @@ pub struct PickingResult { /// Picking ray hits for transparent objects, sorted from far to near. /// If there is an opaque hit, all of them are in front of the opaque hit. pub transparent_hits: Vec, - - /// The picking ray used. Given in the coordinates of the space the picking is performed in. - picking_ray: macaw::Ray3, } impl PickingResult { - /// The space position of a given hit. 
- #[allow(dead_code)] - pub fn space_position(&self, hit: &PickingRayHit) -> glam::Vec3 { - self.picking_ray.origin + self.picking_ray.dir * hit.ray_t - } - /// Iterates over all hits from far to close. pub fn iter_hits(&self) -> impl Iterator { self.opaque_hit.iter().chain(self.transparent_hits.iter()) } + + pub fn space_position(&self, ray_in_world: &macaw::Ray3) -> Option { + self.opaque_hit + .as_ref() + .or_else(|| self.transparent_hits.last()) + .map(|hit| hit.space_position(ray_in_world)) + } } const RAY_T_EPSILON: f32 = f32::EPSILON; -struct PickingContext { - pointer_in_ui: glam::Vec2, - ray_in_world: macaw::Ray3, - ui_from_world: glam::Mat4, - max_side_ui_dist_sq: f32, -} - +/// State used to build up picking results. struct PickingState { closest_opaque_side_ui_dist_sq: f32, closest_opaque_pick: PickingRayHit, @@ -138,146 +135,208 @@ impl PickingState { } } -#[allow(clippy::too_many_arguments)] -pub fn picking( - render_ctx: &re_renderer::RenderContext, - gpu_readback_identifier: re_renderer::GpuReadbackIdentifier, - previous_picking_result: &Option, - pointer_in_ui: glam::Vec2, - ui_rect: &egui::Rect, - eye: &Eye, - primitives: &SceneSpatialPrimitives, - ui_data: &SceneSpatialUiData, - ui_interaction_radius: f32, -) -> PickingResult { - crate::profile_function!(); +/// Picking context in which picking is performed. +pub struct PickingContext { + /// Cursor position in the UI coordinate system. + pub pointer_in_ui: glam::Vec2, - let max_side_ui_dist_sq = ui_interaction_radius * ui_interaction_radius; - - let context = PickingContext { - pointer_in_ui, - ui_from_world: eye.ui_from_world(ui_rect), - ray_in_world: eye.picking_ray(ui_rect, pointer_in_ui), - max_side_ui_dist_sq, - }; - let mut state = PickingState { - closest_opaque_side_ui_dist_sq: max_side_ui_dist_sq, - closest_opaque_pick: PickingRayHit { - instance_path_hash: InstancePathHash::NONE, - ray_t: f32::INFINITY, - info: AdditionalPickingInfo::None, - depth_offset: 0, - used_gpu_picking: false, - }, - // Combined, sorted (and partially "hidden") by opaque results later. - transparent_hits: Vec::new(), - }; - - let SceneSpatialPrimitives { - bounding_box: _, - textured_rectangles, - textured_rectangles_ids, - line_strips, - points, - meshes, - depth_clouds: _, // no picking for depth clouds yet - any_outlines: _, - } = primitives; - - picking_points(&context, &mut state, points); - picking_lines(&context, &mut state, line_strips); - picking_meshes(&context, &mut state, meshes); - picking_textured_rects( - &context, - &mut state, - textured_rectangles, - textured_rectangles_ids, - ); - picking_ui_rects(&context, &mut state, ui_data); - - // GPU based picking. - // Only look at newest available result, discard everything else. - let mut gpu_picking_result = None; - while let Some(picking_result) = - PickingLayerProcessor::next_readback_result::<()>(render_ctx, gpu_readback_identifier) - { - gpu_picking_result = Some(picking_result); - } - // TODO(andreas): Use gpu picking as fallback for now to fix meshes. Should combine instead! - if state.closest_opaque_pick.instance_path_hash == InstancePathHash::NONE { - if let Some(gpu_picking_result) = gpu_picking_result { - // TODO(andreas): Pick middle pixel for now. But we soon want to snap to the closest object using a bigger picking rect. 
- let pos_on_picking_rect = gpu_picking_result.rect.extent / 2; - let picked_id = gpu_picking_result.picked_id(pos_on_picking_rect); - let picked_object = instance_path_hash_from_picking_layer_id(picked_id); - - // It is old data, the object might be gone by now! - if picked_object.is_some() { - // TODO(andreas): Once this is the primary path we should not awkwardly reconstruct the ray_t here. It's entirely correct either! - state.closest_opaque_pick.ray_t = gpu_picking_result - .picked_world_position(pos_on_picking_rect) - .distance(context.ray_in_world.origin); - state.closest_opaque_pick.instance_path_hash = picked_object; - state.closest_opaque_pick.used_gpu_picking = true; - } - } else { - // It is possible that some frames we don't get a picking result and the frame after we get several. - // We need to cache the last picking result and use it until we get a new one or the mouse leaves the screen. - // (Andreas: On my mac this *actually* happens in very simple scenes, I get occasional frames with 0 and then with 2 picking results!) - if let Some(PickingResult { - opaque_hit: Some(previous_opaque_hit), - .. - }) = previous_picking_result - { - if previous_opaque_hit.used_gpu_picking { - state.closest_opaque_pick = previous_opaque_hit.clone(); - } - } + /// Cursor position on the renderer canvas in pixels. + pub pointer_in_pixel: glam::Vec2, + + /// Cursor position in the 2D space coordinate system. + /// + /// For 3D spaces this is equal to the cursor position in pixel coordinate system. + pub pointer_in_space2d: glam::Vec2, + + /// The picking ray used. Given in the coordinates of the space the picking is performed in. + pub ray_in_world: macaw::Ray3, + + /// Transformation from ui coordinates to world coordinates. + ui_from_world: glam::Mat4, + + /// Multiply with this to convert to pixels from points. + pixels_from_points: f32, +} + +impl PickingContext { + /// Radius in which cursor interactions may snap to the nearest object even if the cursor + /// does not hover it directly. + /// + /// Note that this needs to be scaled when zooming is applied by the virtual->visible ui rect transform. + pub const UI_INTERACTION_RADIUS: f32 = 5.0; + + pub fn new( + pointer_in_ui: egui::Pos2, + space2d_from_ui: eframe::emath::RectTransform, + ui_clip_rect: egui::Rect, + pixels_from_points: f32, + eye: &Eye, + ) -> PickingContext { + let pointer_in_space2d = space2d_from_ui.transform_pos(pointer_in_ui); + let pointer_in_space2d = glam::vec2(pointer_in_space2d.x, pointer_in_space2d.y); + let pointer_in_pixel = (pointer_in_ui - ui_clip_rect.left_top()) * pixels_from_points; + + PickingContext { + pointer_in_space2d, + pointer_in_pixel: glam::vec2(pointer_in_pixel.x, pointer_in_pixel.y), + pointer_in_ui: glam::vec2(pointer_in_ui.x, pointer_in_ui.y), + ui_from_world: eye.ui_from_world(*space2d_from_ui.to()), + ray_in_world: eye.picking_ray(*space2d_from_ui.to(), pointer_in_space2d), + pixels_from_points, } } - state.sort_and_remove_hidden_transparent(); + /// Performs picking for a given scene. 
+ pub fn pick( + &self, + render_ctx: &re_renderer::RenderContext, + gpu_readback_identifier: re_renderer::GpuReadbackIdentifier, + previous_picking_result: &Option, + primitives: &SceneSpatialPrimitives, + ui_data: &SceneSpatialUiData, + ) -> PickingResult { + crate::profile_function!(); + + let max_side_ui_dist_sq = Self::UI_INTERACTION_RADIUS * Self::UI_INTERACTION_RADIUS; + + let mut state = PickingState { + closest_opaque_side_ui_dist_sq: max_side_ui_dist_sq, + closest_opaque_pick: PickingRayHit { + instance_path_hash: InstancePathHash::NONE, + ray_t: f32::INFINITY, + info: AdditionalPickingInfo::None, + depth_offset: 0, + }, + // Combined, sorted (and partially "hidden") by opaque results later. + transparent_hits: Vec::new(), + }; + + let SceneSpatialPrimitives { + bounding_box: _, + textured_rectangles, + textured_rectangles_ids, + line_strips, + points: _, + meshes: _, + depth_clouds: _, // no picking for depth clouds yet + any_outlines: _, + } = primitives; + + // GPU based picking. + picking_gpu( + render_ctx, + gpu_readback_identifier, + &mut state, + self, + previous_picking_result, + ); + + picking_lines(self, &mut state, line_strips); + picking_textured_rects( + self, + &mut state, + textured_rectangles, + textured_rectangles_ids, + ); + picking_ui_rects(self, &mut state, ui_data); + + state.sort_and_remove_hidden_transparent(); - PickingResult { - opaque_hit: state - .closest_opaque_pick - .instance_path_hash - .is_some() - .then_some(state.closest_opaque_pick), - transparent_hits: state.transparent_hits, - picking_ray: context.ray_in_world, + PickingResult { + opaque_hit: state + .closest_opaque_pick + .instance_path_hash + .is_some() + .then_some(state.closest_opaque_pick), + transparent_hits: state.transparent_hits, + } } } -fn picking_points( - context: &PickingContext, +fn picking_gpu( + render_ctx: &re_renderer::RenderContext, + gpu_readback_identifier: u64, state: &mut PickingState, - points: &re_renderer::PointCloudBuilder, + context: &PickingContext, + previous_picking_result: &Option, ) { crate::profile_function!(); - for (batch, vertex_iter) in points.iter_vertices_and_userdata_by_batch() { - // For getting the closest point we could transform the mouse ray into the "batch space". - // However, we want to determine the closest point in *screen space*, meaning that we need to project all points. - let ui_from_batch = context.ui_from_world * batch.world_from_obj; + // Only look at newest available result, discard everything else. + let mut gpu_picking_result = None; + while let Some(picking_result) = + PickingLayerProcessor::next_readback_result::<()>(render_ctx, gpu_readback_identifier) + { + gpu_picking_result = Some(picking_result); + } - for (point, instance_hash) in vertex_iter { - if instance_hash.is_none() { - continue; + if let Some(gpu_picking_result) = gpu_picking_result { + // First, figure out where on the rect the cursor is by now. + // (for simplicity, we assume the screen hasn't been resized) + let pointer_on_picking_rect = + context.pointer_in_pixel - gpu_picking_result.rect.left_top.as_vec2(); + // The cursor might have moved outside of the rect. Clamp it back in. + let pointer_on_picking_rect = pointer_on_picking_rect.clamp( + glam::Vec2::ZERO, + (gpu_picking_result.rect.extent - glam::UVec2::ONE).as_vec2(), + ); + + // Find closest non-zero pixel to the cursor. 
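+        // (The readback buffer covers the picking rect row by row, so index `i` below maps
+        // to the pixel at `(i % width, i / width)`; distances are measured from pixel
+        // centers to the cursor position on the rect.)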
+ let mut picked_id = re_renderer::PickingLayerId::default(); + let mut picked_on_picking_rect = glam::Vec2::ZERO; + let mut closest_rect_distance_sq = f32::INFINITY; + + for (i, id) in gpu_picking_result.picking_id_data.iter().enumerate() { + if id.object.0 != 0 { + let current_pos_on_picking_rect = glam::uvec2( + i as u32 % gpu_picking_result.rect.extent.x, + i as u32 / gpu_picking_result.rect.extent.x, + ) + .as_vec2() + + glam::vec2(0.5, 0.5); // Use pixel center for distances. + let distance_sq = + current_pos_on_picking_rect.distance_squared(pointer_on_picking_rect); + if distance_sq < closest_rect_distance_sq { + picked_on_picking_rect = current_pos_on_picking_rect; + closest_rect_distance_sq = distance_sq; + picked_id = *id; + } } + } + if picked_id == re_renderer::PickingLayerId::default() { + // Nothing found. + return; + } - // TODO(emilk): take point radius into account - let pos_in_ui = ui_from_batch.project_point3(point.position); - let dist_sq = pos_in_ui.truncate().distance_squared(context.pointer_in_ui); - if dist_sq <= state.closest_opaque_side_ui_dist_sq { - let t = context - .ray_in_world - .closest_t_to_point(batch.world_from_obj.transform_point3(point.position)); - state.check_hit( - dist_sq, - PickingRayHit::from_instance_and_t(*instance_hash, t), - false, - ); + let ui_distance_sq = picked_on_picking_rect.distance_squared(pointer_on_picking_rect) + / (context.pixels_from_points * context.pixels_from_points); + let picked_world_position = + gpu_picking_result.picked_world_position(picked_on_picking_rect.as_uvec2()); + state.check_hit( + ui_distance_sq, + PickingRayHit { + instance_path_hash: instance_path_hash_from_picking_layer_id(picked_id), + // TODO(andreas): Once this is the primary path we should not awkwardly reconstruct the ray_t here. It's not entirely correct either! + ray_t: picked_world_position.distance(context.ray_in_world.origin), + depth_offset: 0, + info: AdditionalPickingInfo::GpuPickingResult, + }, + false, + ); + } else { + // It is possible that some frames we don't get a picking result and the frame after we get several. + // We need to cache the last picking result and use it until we get a new one or the mouse leaves the screen. + // (Andreas: On my mac this *actually* happens in very simple scenes, I get occasional frames with 0 and then with 2 picking results!) + if let Some(PickingResult { + opaque_hit: Some(previous_opaque_hit), + .. 
+ }) = previous_picking_result + { + if matches!( + previous_opaque_hit.info, + AdditionalPickingInfo::GpuPickingResult + ) { + state.closest_opaque_pick = previous_opaque_hit.clone(); } } } @@ -311,10 +370,10 @@ fn picking_lines( let b = ui_from_batch.project_point3(end.position); let side_ui_dist_sq = line_segment_distance_sq_to_point_2d( [a.truncate(), b.truncate()], - context.pointer_in_ui, + context.pointer_in_space2d, ); - if side_ui_dist_sq < context.max_side_ui_dist_sq { + if side_ui_dist_sq < state.closest_opaque_side_ui_dist_sq { let start_world = batch.world_from_obj.transform_point3(start.position); let end_world = batch.world_from_obj.transform_point3(end.position); let t = ray_closest_t_line_segment(&context.ray_in_world, [start_world, end_world]); @@ -329,31 +388,6 @@ fn picking_lines( } } -fn picking_meshes( - context: &PickingContext, - state: &mut PickingState, - meshes: &[super::MeshSource], -) { - crate::profile_function!(); - - for mesh in meshes { - if !mesh.picking_instance_hash.is_some() { - continue; - } - let ray_in_mesh = (mesh.world_from_mesh.inverse() * context.ray_in_world).normalize(); - let t = crate::math::ray_bbox_intersect(&ray_in_mesh, mesh.mesh.bbox()); - - if t < 0.0 { - let side_ui_dist_sq = 0.0; - state.check_hit( - side_ui_dist_sq, - PickingRayHit::from_instance_and_t(mesh.picking_instance_hash, t), - false, - ); - } - } -} - fn picking_textured_rects( context: &PickingContext, state: &mut PickingState, @@ -392,7 +426,6 @@ fn picking_textured_rects( ray_t: t, info: AdditionalPickingInfo::TexturedRect(glam::vec2(u, v)), depth_offset: rect.depth_offset, - used_gpu_picking: false, }; state.check_hit(0.0, picking_hit, rect.multiplicative_tint.a() < 1.0); } @@ -406,7 +439,7 @@ fn picking_ui_rects( ) { crate::profile_function!(); - let egui_pos = egui::pos2(context.pointer_in_ui.x, context.pointer_in_ui.y); + let egui_pos = egui::pos2(context.pointer_in_space2d.x, context.pointer_in_space2d.y); for (bbox, instance_hash) in &ui_data.pickable_ui_rects { let side_ui_dist_sq = bbox.distance_sq_to_pos(egui_pos); state.check_hit( @@ -416,7 +449,6 @@ fn picking_ui_rects( ray_t: 0.0, info: AdditionalPickingInfo::GuiOverlay, depth_offset: 0, - used_gpu_picking: false, }, false, ); diff --git a/crates/re_viewer/src/ui/view_spatial/scene/primitives.rs b/crates/re_viewer/src/ui/view_spatial/scene/primitives.rs index e8a56e454b27..c495496b3c9a 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/primitives.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/primitives.rs @@ -25,7 +25,7 @@ pub struct SceneSpatialPrimitives { pub textured_rectangles: Vec, pub line_strips: LineStripSeriesBuilder, - pub points: PointCloudBuilder, + pub points: PointCloudBuilder, pub meshes: Vec, pub depth_clouds: DepthClouds, diff --git a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/mod.rs b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/mod.rs index d4811ca9864d..a2cbc5e371e3 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/mod.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/mod.rs @@ -42,21 +42,55 @@ pub trait ScenePart { /// Computes the instance hash that should be used for picking (in turn for selecting/hover) /// -/// Takes into account the currently the object properties, currently highlighted objects, and number of instances. -pub fn instance_path_hash_for_picking( +/// TODO(andreas): Resolve the hash-for-picking when retrieving the picking result instead of doing it ahead of time here to speed up things. 
+/// (gpu picking would always get the "most fine grained hash" which we could then resolve to groups etc. depending on selection state) +/// Right now this is a bit hard to do since number of instances depends on the Primary. This is expected to change soon. +pub fn instance_path_hash_for_picking( ent_path: &EntityPath, instance_key: re_log_types::component_types::InstanceKey, - entity_view: &re_query::EntityView, + entity_view: &re_query::EntityView, props: &EntityProperties, any_part_selected: bool, ) -> InstancePathHash { if props.interactive { - if entity_view.num_instances() == 1 || !any_part_selected { - InstancePathHash::entity_splat(ent_path) - } else { - InstancePathHash::instance(ent_path, instance_key) - } + InstancePathHash::instance( + ent_path, + instance_key_for_picking(instance_key, entity_view, any_part_selected), + ) } else { InstancePathHash::NONE } } + +/// Computes the instance key that should be used for picking (in turn for selecting/hover) +/// +/// Assumes the entity is interactive. +/// +/// TODO(andreas): Resolve the hash-for-picking when retrieving the picking result instead of doing it ahead of time here to speed up things. +/// (gpu picking would always get the "most fine grained hash" which we could then resolve to groups etc. depending on selection state) +/// Right now this is a bit hard to do since number of instances depends on the Primary. This is expected to change soon. +pub fn instance_key_for_picking( + instance_key: re_log_types::component_types::InstanceKey, + entity_view: &re_query::EntityView, + any_part_selected: bool, +) -> re_log_types::component_types::InstanceKey { + // If no part of the entity is selected or if there is only one instance, selecting + // should select the entire entity, not the specific instance. + // (the splat key means that no particular instance is selected but all at once instead) + if entity_view.num_instances() == 1 || !any_part_selected { + re_log_types::component_types::InstanceKey::SPLAT + } else { + instance_key + } +} + +/// See [`instance_key_for_picking`] +pub fn instance_key_to_picking_id( + instance_key: re_log_types::component_types::InstanceKey, + entity_view: &re_query::EntityView, + any_part_selected: bool, +) -> re_renderer::PickingLayerInstanceId { + re_renderer::PickingLayerInstanceId( + instance_key_for_picking(instance_key, entity_view, any_part_selected).0, + ) +} diff --git a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/points2d.rs b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/points2d.rs index d2e86093e056..3811f0cf20ef 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/points2d.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/points2d.rs @@ -17,7 +17,7 @@ use crate::{ }, }; -use super::{instance_path_hash_for_picking, ScenePart}; +use super::{instance_key_to_picking_id, instance_path_hash_for_picking, ScenePart}; pub struct Points2DPart; @@ -26,7 +26,7 @@ impl Points2DPart { fn process_entity_view( scene: &mut SceneSpatial, _query: &SceneQuery<'_>, - props: &EntityProperties, + properties: &EntityProperties, entity_view: &EntityView, ent_path: &EntityPath, world_from_obj: Mat4, @@ -50,6 +50,11 @@ impl Points2DPart { .world_from_obj(world_from_obj) .outline_mask_ids(entity_highlight.overall); + if properties.interactive { + point_batch = + point_batch.picking_object_id(re_renderer::PickingLayerObjectId(ent_path.hash64())); + } + // TODO(andreas): This should follow the same batch processing as points3d. 
let visitor = |instance_key: InstanceKey, pos: Point2D, @@ -62,7 +67,7 @@ impl Points2DPart { ent_path, instance_key, entity_view, - props, + properties, entity_highlight.any_selection_highlight, ); @@ -88,11 +93,17 @@ impl Points2DPart { let radius = radius.map_or(Size::AUTO, |r| Size::new_scene(r.0)); let label = annotation_info.label(label.map(|l| l.0).as_ref()); - let point_range_builder = point_batch - .add_point_2d(pos) - .color(color) - .radius(radius) - .user_data(picking_instance_hash); + let mut point_range_builder = point_batch.add_point_2d(pos).color(color).radius(radius); + + // Set picking instance id if interactive. + if properties.interactive { + point_range_builder = + point_range_builder.picking_instance_id(instance_key_to_picking_id( + instance_key, + entity_view, + entity_highlight.any_selection_highlight, + )); + } // Check if this point is individually highlighted. if let Some(instance_mask_ids) = entity_highlight.instances.get(&instance_key) { @@ -119,7 +130,7 @@ impl Points2DPart { } // Generate keypoint connections if any. - scene.load_keypoint_connections(ent_path, keypoints, &annotations, props.interactive); + scene.load_keypoint_connections(ent_path, keypoints, &annotations, properties.interactive); Ok(()) } diff --git a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/points3d.rs b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/points3d.rs index 3303d8da0a43..5b349c52e791 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/points3d.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/points3d.rs @@ -17,7 +17,10 @@ use crate::{ annotations::ResolvedAnnotationInfo, scene::SceneQuery, view_spatial::{ - scene::{scene_part::instance_path_hash_for_picking, Keypoints}, + scene::{ + scene_part::{instance_key_to_picking_id, instance_path_hash_for_picking}, + Keypoints, + }, SceneSpatial, UiLabel, UiLabelTarget, }, Annotations, DefaultColor, @@ -176,30 +179,31 @@ impl Points3DPart { let (annotation_infos, keypoints) = Self::process_annotations(query, entity_view, &annotations)?; - let instance_path_hashes_for_picking = { - crate::profile_scope!("instance_hashes"); - entity_view - .iter_instance_keys()? - .map(|instance_key| { - instance_path_hash_for_picking( - ent_path, - instance_key, - entity_view, - properties, - entity_highlight.any_selection_highlight, - ) - }) - .collect::>() - }; let colors = Self::process_colors(entity_view, ent_path, &annotation_infos)?; let radii = Self::process_radii(ent_path, entity_view)?; - if show_labels && instance_path_hashes_for_picking.len() <= self.max_labels { + if show_labels && entity_view.num_instances() <= self.max_labels { // Max labels is small enough that we can afford iterating on the colors again. let colors = Self::process_colors(entity_view, ent_path, &annotation_infos)?.collect::>(); + let instance_path_hashes_for_picking = { + crate::profile_scope!("instance_hashes"); + entity_view + .iter_instance_keys()? 
+ .map(|instance_key| { + instance_path_hash_for_picking( + ent_path, + instance_key, + entity_view, + properties, + entity_highlight.any_selection_highlight, + ) + }) + .collect::>() + }; + scene.ui.labels.extend(Self::process_labels( entity_view, &instance_path_hashes_for_picking, @@ -216,13 +220,27 @@ impl Points3DPart { .batch("3d points") .world_from_obj(world_from_obj) .outline_mask_ids(entity_highlight.overall); + if properties.interactive { + point_batch = point_batch + .picking_object_id(re_renderer::PickingLayerObjectId(ent_path.hash64())); + } let mut point_range_builder = point_batch .add_points(entity_view.num_instances(), point_positions) .colors(colors) - .radii(radii) - .user_data(instance_path_hashes_for_picking.into_iter()); + .radii(radii); + if properties.interactive { + point_range_builder = point_range_builder.picking_instance_ids( + entity_view.iter_instance_keys()?.map(|instance_key| { + instance_key_to_picking_id( + instance_key, + entity_view, + entity_highlight.any_selection_highlight, + ) + }), + ); + } - // Determine if there's any subranges that need extra highlighting. + // Determine if there's any sub-ranges that need extra highlighting. { crate::profile_scope!("marking additional highlight points"); for (highlighted_key, instance_mask_ids) in &entity_highlight.instances { diff --git a/crates/re_viewer/src/ui/view_spatial/ui.rs b/crates/re_viewer/src/ui/view_spatial/ui.rs index 6e5cbb887a67..91a14e813591 100644 --- a/crates/re_viewer/src/ui/view_spatial/ui.rs +++ b/crates/re_viewer/src/ui/view_spatial/ui.rs @@ -9,17 +9,21 @@ use re_renderer::OutlineConfig; use crate::{ misc::{ - space_info::query_view_coordinates, SelectionHighlight, SpaceViewHighlights, ViewerContext, + space_info::query_view_coordinates, HoveredSpace, SelectionHighlight, SpaceViewHighlights, + ViewerContext, }, ui::{ - data_blueprint::DataBlueprintTree, space_view::ScreenshotMode, view_spatial::UiLabelTarget, + data_blueprint::DataBlueprintTree, + data_ui::{self, DataUi}, + space_view::ScreenshotMode, + view_spatial::UiLabelTarget, SpaceViewId, }, }; use super::{ eye::Eye, - scene::{PickingResult, SceneSpatialUiData}, + scene::{AdditionalPickingInfo, PickingResult, SceneSpatialUiData}, ui_2d::View2DState, ui_3d::View3DState, SceneSpatial, SpaceSpecs, @@ -59,8 +63,6 @@ impl From for WidgetText { } } -pub const PICKING_RECT_SIZE: u32 = 15; - #[derive(Clone, serde::Deserialize, serde::Serialize)] pub struct ViewSpatialState { /// How the scene is navigated. 
@@ -534,7 +536,7 @@ pub fn create_labels(
 
     let mut label_shapes = Vec::with_capacity(scene_ui.labels.len() * 2);
 
-    let ui_from_world_3d = eye3d.ui_from_world(ui_from_space2d.to());
+    let ui_from_world_3d = eye3d.ui_from_world(*ui_from_space2d.to());
 
     for label in &scene_ui.labels {
         let (wrap_width, text_anchor_pos) = match label.target {
@@ -662,3 +664,192 @@ pub fn screenshot_context_menu(
         (response, None)
     }
 }
+
+#[allow(clippy::too_many_arguments)]
+pub fn picking(
+    ctx: &mut ViewerContext<'_>,
+    mut response: egui::Response,
+    space_from_ui: egui::emath::RectTransform,
+    ui_clip_rect: egui::Rect,
+    parent_ui: &mut egui::Ui,
+    eye: Eye,
+    view_builder: &mut re_renderer::view_builder::ViewBuilder,
+    space_view_id: SpaceViewId,
+    state: &mut ViewSpatialState,
+    scene: &SceneSpatial,
+    space: &EntityPath,
+) -> egui::Response {
+    crate::profile_function!();
+
+    let Some(pointer_pos_ui) = response.hover_pos() else {
+        state.previous_picking_result = None;
+        return response;
+    };
+
+    ctx.select_hovered_on_click(&response);
+
+    let picking_context = super::scene::PickingContext::new(
+        pointer_pos_ui,
+        space_from_ui,
+        ui_clip_rect,
+        parent_ui.ctx().pixels_per_point(),
+        &eye,
+    );
+
+    let picking_rect_size =
+        super::scene::PickingContext::UI_INTERACTION_RADIUS * parent_ui.ctx().pixels_per_point();
+    // Make the picking rect bigger than necessary so we can use it to counteract delays.
+    // (By the time the picking rectangle is read back, the cursor may have moved on.)
+    let picking_rect_size = (picking_rect_size * 2.0)
+        .ceil()
+        .at_least(8.0)
+        .at_most(128.0) as u32;
+
+    let _ = view_builder.schedule_picking_rect(
+        ctx.render_ctx,
+        re_renderer::IntRect::from_middle_and_extent(
+            picking_context.pointer_in_pixel.as_ivec2(),
+            glam::uvec2(picking_rect_size, picking_rect_size),
+        ),
+        space_view_id.gpu_readback_id(),
+        (),
+        ctx.app_options.show_picking_debug_overlay,
+    );
+
+    let picking_result = picking_context.pick(
+        ctx.render_ctx,
+        space_view_id.gpu_readback_id(),
+        &state.previous_picking_result,
+        &scene.primitives,
+        &scene.ui,
+    );
+    state.previous_picking_result = Some(picking_result.clone());
+
+    // Depth at pointer used for projecting rays from a hovered 2D view to corresponding 3D view(s).
+    // TODO(#1818): Depth at pointer only works for depth images so far.
+    let mut depth_at_pointer = None;
+    for hit in picking_result.iter_hits() {
+        let Some(instance_path) = hit.instance_path_hash.resolve(&ctx.log_db.entity_db)
+            else { continue; };
+
+        // Special hover ui for images.
+ let picked_image_with_uv = if let AdditionalPickingInfo::TexturedRect(uv) = hit.info { + scene + .ui + .images + .iter() + .find(|image| image.instance_path_hash == hit.instance_path_hash) + .map(|image| (image, uv)) + } else { + None + }; + response = if let Some((image, uv)) = picked_image_with_uv { + if let Some(meter) = image.meter { + if let Some(raw_value) = image.tensor.get(&[ + picking_context.pointer_in_space2d.y.round() as _, + picking_context.pointer_in_space2d.x.round() as _, + ]) { + let raw_value = raw_value.as_f64(); + let depth_in_meters = raw_value / meter as f64; + depth_at_pointer = Some(depth_in_meters as f32); + } + } + + response + .on_hover_cursor(egui::CursorIcon::Crosshair) + .on_hover_ui_at_pointer(|ui| { + ui.set_max_width(320.0); + + ui.vertical(|ui| { + ui.label(instance_path.to_string()); + instance_path.data_ui( + ctx, + ui, + crate::ui::UiVerbosity::Small, + &ctx.current_query(), + ); + + let tensor_view = ctx + .cache + .image + .get_colormapped_view(&image.tensor, &image.annotations); + + if let [h, w, ..] = image.tensor.shape() { + ui.separator(); + ui.horizontal(|ui| { + let (w, h) = (w.size as f32, h.size as f32); + let center = [(uv.x * w) as isize, (uv.y * h) as isize]; + if *state.nav_mode.get() == SpatialNavigationMode::TwoD { + let rect = egui::Rect::from_min_size( + egui::Pos2::ZERO, + egui::vec2(w, h), + ); + data_ui::image::show_zoomed_image_region_area_outline( + ui, + &tensor_view, + center, + space_from_ui.inverse().transform_rect(rect), + ); + } + data_ui::image::show_zoomed_image_region( + ui, + &tensor_view, + center, + image.meter, + ); + }); + } + }); + }) + } else { + // Hover ui for everything else + response.on_hover_ui_at_pointer(|ui| { + ctx.instance_path_button(ui, Some(space_view_id), &instance_path); + instance_path.data_ui( + ctx, + ui, + crate::ui::UiVerbosity::Reduced, + &ctx.current_query(), + ); + }) + }; + + ctx.set_hovered(picking_result.iter_hits().filter_map(|pick| { + pick.instance_path_hash + .resolve(&ctx.log_db.entity_db) + .map(|instance_path| { + crate::misc::Item::InstancePath(Some(space_view_id), instance_path) + }) + })); + } + + let hovered_space = match state.nav_mode.get() { + SpatialNavigationMode::TwoD => HoveredSpace::TwoD { + space_2d: space.clone(), + pos: picking_context + .pointer_in_space2d + .extend(depth_at_pointer.unwrap_or(f32::INFINITY)), + }, + SpatialNavigationMode::ThreeD => { + let hovered_point = picking_result.space_position(&picking_context.ray_in_world); + HoveredSpace::ThreeD { + space_3d: space.clone(), + pos: hovered_point, + tracked_space_camera: state.state_3d.tracked_camera.clone(), + point_in_space_cameras: scene + .space_cameras + .iter() + .map(|cam| { + ( + cam.instance_path_hash, + hovered_point.and_then(|pos| cam.project_onto_2d(pos)), + ) + }) + .collect(), + } + } + }; + ctx.selection_state_mut().set_hovered_space(hovered_space); + + response +} diff --git a/crates/re_viewer/src/ui/view_spatial/ui_2d.rs b/crates/re_viewer/src/ui/view_spatial/ui_2d.rs index bb4c37eb9eb7..48b0255e8333 100644 --- a/crates/re_viewer/src/ui/view_spatial/ui_2d.rs +++ b/crates/re_viewer/src/ui/view_spatial/ui_2d.rs @@ -1,21 +1,17 @@ use eframe::emath::RectTransform; -use egui::{ - pos2, vec2, Align2, Color32, NumExt as _, Pos2, Rect, Response, ScrollArea, Shape, Vec2, -}; +use egui::{pos2, vec2, Align2, Color32, NumExt as _, Pos2, Rect, ScrollArea, Shape, Vec2}; use macaw::IsoTransform; use re_data_store::EntityPath; use re_renderer::view_builder::{TargetConfiguration, ViewBuilder}; use super::{ 
eye::Eye, - scene::AdditionalPickingInfo, - ui::{create_labels, screenshot_context_menu, PICKING_RECT_SIZE}, + ui::{create_labels, picking, screenshot_context_menu}, SpatialNavigationMode, ViewSpatialState, }; use crate::{ - misc::{HoveredSpace, Item, SpaceViewHighlights}, + misc::{HoveredSpace, SpaceViewHighlights}, ui::{ - data_ui::{self, DataUi}, view_spatial::{ ui::outline_config, ui_renderer_bridge::{ @@ -23,7 +19,7 @@ use crate::{ }, SceneSpatial, }, - SpaceViewId, UiVerbosity, + SpaceViewId, }, ViewerContext, }; @@ -341,134 +337,22 @@ fn view_2d_scrollable( SpatialNavigationMode::TwoD, ); - let should_do_hovering = !re_ui::egui_helpers::is_anything_being_dragged(parent_ui.ctx()); - - // Check if we're hovering any hover primitive. - let mut depth_at_pointer = None; - if let (true, Some(pointer_pos_ui)) = (should_do_hovering, response.hover_pos()) { - // Schedule GPU picking. - let pointer_in_pixel = ((pointer_pos_ui - response.rect.left_top()) - * parent_ui.ctx().pixels_per_point()) - .round(); - let _ = view_builder.schedule_picking_rect( - ctx.render_ctx, - re_renderer::IntRect::from_middle_and_extent( - glam::ivec2(pointer_in_pixel.x as i32, pointer_in_pixel.y as i32), - glam::uvec2(PICKING_RECT_SIZE, PICKING_RECT_SIZE), - ), - space_view_id.gpu_readback_id(), - (), - ctx.app_options.show_picking_debug_overlay, - ); - - let pointer_pos_space = space_from_ui.transform_pos(pointer_pos_ui); - let hover_radius = space_from_ui.scale().y * 5.0; // TODO(emilk): from egui? - let picking_result = scene.picking( - ctx.render_ctx, - space_view_id.gpu_readback_id(), - &state.previous_picking_result, - glam::vec2(pointer_pos_space.x, pointer_pos_space.y), - &scene_rect_accum, - &eye, - hover_radius, + if !re_ui::egui_helpers::is_anything_being_dragged(parent_ui.ctx()) { + response = picking( + ctx, + response, + space_from_ui, + painter.clip_rect(), + parent_ui, + eye, + &mut view_builder, + space_view_id, + state, + &scene, + space, ); - state.previous_picking_result = Some(picking_result.clone()); - - for hit in picking_result.iter_hits() { - let Some(instance_path) = hit.instance_path_hash.resolve(&ctx.log_db.entity_db) - else { continue; }; - - // Special hover ui for images. - let picked_image_with_uv = if let AdditionalPickingInfo::TexturedRect(uv) = hit.info { - scene - .ui - .images - .iter() - .find(|image| image.instance_path_hash == hit.instance_path_hash) - .map(|image| (image, uv)) - } else { - None - }; - response = if let Some((image, uv)) = picked_image_with_uv { - // TODO(andreas): This is different in 3d view. - if let Some(meter) = image.meter { - if let Some(raw_value) = image.tensor.get(&[ - pointer_pos_space.y.round() as _, - pointer_pos_space.x.round() as _, - ]) { - let raw_value = raw_value.as_f64(); - let depth_in_meters = raw_value / meter as f64; - depth_at_pointer = Some(depth_in_meters as f32); - } - } - - response - .on_hover_cursor(egui::CursorIcon::Crosshair) - .on_hover_ui_at_pointer(|ui| { - ui.set_max_width(320.0); - - ui.vertical(|ui| { - ui.label(instance_path.to_string()); - instance_path.data_ui( - ctx, - ui, - UiVerbosity::Small, - &ctx.current_query(), - ); - - let tensor_view = ctx - .cache - .image - .get_colormapped_view(&image.tensor, &image.annotations); - - if let [h, w, ..] = image.tensor.shape() { - ui.separator(); - ui.horizontal(|ui| { - // TODO(andreas): 3d skips the show_zoomed_image_region_rect part here. 
- let (w, h) = (w.size as f32, h.size as f32); - let center = [(uv.x * w) as isize, (uv.y * h) as isize]; - let rect = Rect::from_min_size(Pos2::ZERO, egui::vec2(w, h)); - data_ui::image::show_zoomed_image_region_area_outline( - parent_ui, - &tensor_view, - center, - ui_from_space.transform_rect(rect), - ); - data_ui::image::show_zoomed_image_region( - ui, - &tensor_view, - center, - image.meter, - ); - }); - } - }); - }) - } else { - // Hover ui for everything else - response.on_hover_ui_at_pointer(|ui| { - ctx.instance_path_button(ui, Some(space_view_id), &instance_path); - instance_path.data_ui( - ctx, - ui, - crate::ui::UiVerbosity::Reduced, - &ctx.current_query(), - ); - }) - }; - - ctx.set_hovered(picking_result.iter_hits().filter_map(|pick| { - pick.instance_path_hash - .resolve(&ctx.log_db.entity_db) - .map(|instance_path| Item::InstancePath(Some(space_view_id), instance_path)) - })); - } - } else { - state.previous_picking_result = None; } - ctx.select_hovered_on_click(&response); - // ------------------------------------------------------------------------ // Screenshot context menu. @@ -502,7 +386,6 @@ fn view_2d_scrollable( )); } - project_onto_other_spaces(ctx, space, &response, &space_from_ui, depth_at_pointer); painter.extend(show_projections_from_3d_space( ctx, parent_ui, @@ -552,27 +435,6 @@ fn setup_target_config( // ------------------------------------------------------------------------ -fn project_onto_other_spaces( - ctx: &mut ViewerContext<'_>, - space: &EntityPath, - response: &Response, - space_from_ui: &RectTransform, - z: Option, -) { - if let Some(pointer_in_screen) = response.hover_pos() { - let pointer_in_space = space_from_ui.transform_pos(pointer_in_screen); - ctx.selection_state_mut() - .set_hovered_space(HoveredSpace::TwoD { - space_2d: space.clone(), - pos: glam::vec3( - pointer_in_space.x, - pointer_in_space.y, - z.unwrap_or(f32::INFINITY), - ), - }); - } -} - fn show_projections_from_3d_space( ctx: &ViewerContext<'_>, ui: &egui::Ui, diff --git a/crates/re_viewer/src/ui/view_spatial/ui_3d.rs b/crates/re_viewer/src/ui/view_spatial/ui_3d.rs index 36d7044d1408..e62782460ae3 100644 --- a/crates/re_viewer/src/ui/view_spatial/ui_3d.rs +++ b/crates/re_viewer/src/ui/view_spatial/ui_3d.rs @@ -13,16 +13,14 @@ use re_renderer::{ use crate::{ misc::{HoveredSpace, Item, SpaceViewHighlights}, ui::{ - data_ui::{self, DataUi}, view_spatial::{ - scene::AdditionalPickingInfo, - ui::{create_labels, outline_config, screenshot_context_menu, PICKING_RECT_SIZE}, + ui::{create_labels, outline_config, picking, screenshot_context_menu}, ui_renderer_bridge::{ fill_view_builder, get_viewport, renderer_paint_callback, ScreenBackground, }, SceneSpatial, SpaceCamera3D, SpatialNavigationMode, }, - SpaceViewId, UiVerbosity, + SpaceViewId, }, ViewerContext, }; @@ -41,7 +39,7 @@ pub struct View3DState { pub orbit_eye: Option, /// Currently tracked camera. - tracked_camera: Option, + pub tracked_camera: Option, /// Camera pose just before we took over another camera via [Self::tracked_camera]. camera_before_tracked_camera: Option, @@ -358,134 +356,22 @@ pub fn view_3d( SpatialNavigationMode::ThreeD, ); - let should_do_hovering = !re_ui::egui_helpers::is_anything_being_dragged(ui.ctx()); - - // TODO(andreas): We're very close making the hover reaction of ui2d and ui3d the same. Finish the job! - // Check if we're hovering any hover primitive. - if let (true, Some(pointer_pos)) = (should_do_hovering, response.hover_pos()) { - // Schedule GPU picking. 
- let pointer_in_pixel = - ((pointer_pos - rect.left_top()) * ui.ctx().pixels_per_point()).round(); - let _ = view_builder.schedule_picking_rect( - ctx.render_ctx, - re_renderer::IntRect::from_middle_and_extent( - glam::ivec2(pointer_in_pixel.x as i32, pointer_in_pixel.y as i32), - glam::uvec2(PICKING_RECT_SIZE, PICKING_RECT_SIZE), - ), - space_view_id.gpu_readback_id(), - (), - ctx.app_options.show_picking_debug_overlay, + if !re_ui::egui_helpers::is_anything_being_dragged(ui.ctx()) { + response = picking( + ctx, + response, + RectTransform::from_to(rect, rect), + rect, + ui, + eye, + &mut view_builder, + space_view_id, + state, + &scene, + space, ); - - let picking_result = scene.picking( - ctx.render_ctx, - space_view_id.gpu_readback_id(), - &state.previous_picking_result, - glam::vec2(pointer_pos.x, pointer_pos.y), - &rect, - &eye, - 5.0, - ); - state.previous_picking_result = Some(picking_result.clone()); - - for hit in picking_result.iter_hits() { - let Some(instance_path) = hit.instance_path_hash.resolve(&ctx.log_db.entity_db) - else { continue; }; - - // Special hover ui for images. - let picked_image_with_uv = if let AdditionalPickingInfo::TexturedRect(uv) = hit.info { - scene - .ui - .images - .iter() - .find(|image| image.instance_path_hash == hit.instance_path_hash) - .map(|image| (image, uv)) - } else { - None - }; - response = if let Some((image, uv)) = picked_image_with_uv { - response - .on_hover_cursor(egui::CursorIcon::Crosshair) - .on_hover_ui_at_pointer(|ui| { - ui.set_max_width(320.0); - - ui.vertical(|ui| { - ui.label(instance_path.to_string()); - instance_path.data_ui( - ctx, - ui, - UiVerbosity::Small, - &ctx.current_query(), - ); - - let tensor_view = ctx - .cache - .image - .get_colormapped_view(&image.tensor, &image.annotations); - - if let [h, w, ..] = &image.tensor.shape[..] { - ui.separator(); - ui.horizontal(|ui| { - let (w, h) = (w.size as f32, h.size as f32); - let center = [(uv.x * w) as isize, (uv.y * h) as isize]; - data_ui::image::show_zoomed_image_region( - ui, - &tensor_view, - center, - image.meter, - ); - }); - } - }); - }) - } else { - // Hover ui for everything else - response.on_hover_ui_at_pointer(|ui| { - ctx.instance_path_button(ui, Some(space_view_id), &instance_path); - instance_path.data_ui( - ctx, - ui, - crate::ui::UiVerbosity::Reduced, - &ctx.current_query(), - ); - }) - }; - } - - ctx.set_hovered(picking_result.iter_hits().filter_map(|pick| { - pick.instance_path_hash - .resolve(&ctx.log_db.entity_db) - .map(|instance_path| Item::InstancePath(Some(space_view_id), instance_path)) - })); - - let hovered_point = picking_result - .opaque_hit - .as_ref() - .or_else(|| picking_result.transparent_hits.last()) - .map(|hit| picking_result.space_position(hit)); - - ctx.selection_state_mut() - .set_hovered_space(HoveredSpace::ThreeD { - space_3d: space.clone(), - pos: hovered_point, - tracked_space_camera: state.state_3d.tracked_camera.clone(), - point_in_space_cameras: scene - .space_cameras - .iter() - .map(|cam| { - ( - cam.instance_path_hash, - hovered_point.and_then(|pos| cam.project_onto_2d(pos)), - ) - }) - .collect(), - }); - } else { - state.previous_picking_result = None; } - ctx.select_hovered_on_click(&response); - // Double click changes camera if response.double_clicked() { state.state_3d.tracked_camera = None;
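
// ----------------------------------------------------------------------------
// For reference, a minimal sketch (not part of this diff) of how a caller is
// expected to feed the new picking ids in place of the removed per-point user
// data. The function name `build_pickable_points`, the object id `42`, and the
// exact parameter types are illustrative assumptions; the builder methods
// themselves (`picking_object_id`, `picking_instance_ids`) are the ones used
// in the point renderers above.
fn build_pickable_points(
    re_ctx: &mut re_renderer::RenderContext,
    positions: &[glam::Vec3],
) -> re_renderer::PointCloudBuilder {
    let mut builder = re_renderer::PointCloudBuilder::new(re_ctx);
    builder
        .batch("pickable points")
        // One object id per batch (the viewer passes the entity path hash here) ...
        .picking_object_id(re_renderer::PickingLayerObjectId(42))
        .add_points(positions.len(), positions.iter().copied())
        // ... and one instance id per point.
        .picking_instance_ids((0..positions.len() as u64).map(re_renderer::PickingLayerInstanceId));
    builder
}
// The ids written this way come back from `PickingLayerProcessor::next_readback_result`
// (see `picking_gpu` above) and are resolved to an `InstancePathHash` via
// `instance_path_hash_from_picking_layer_id`.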