From 7bb2384367f970d4e4067a8d3890ca3ee818041d Mon Sep 17 00:00:00 2001
From: Gregor Lenz
Date: Tue, 16 Jul 2024 18:48:31 -0600
Subject: [PATCH] add start_time and end_time to ToFrame

---
 tonic/functional/to_frame.py | 36 +++++++++++++++++++++---------------
 tonic/slicers.py             | 20 ++++++++++++++++----
 tonic/transforms.py          | 40 ++++++++++++++++++++++++++++++++--------
 3 files changed, 69 insertions(+), 27 deletions(-)

diff --git a/tonic/functional/to_frame.py b/tonic/functional/to_frame.py
index af8dc23a..2aa9ed1c 100644
--- a/tonic/functional/to_frame.py
+++ b/tonic/functional/to_frame.py
@@ -9,14 +9,16 @@
 
 
 def to_frame_numpy(
-        events,
-        sensor_size,
-        time_window=None,
-        event_count=None,
-        n_time_bins=None,
-        n_event_bins=None,
-        overlap=0.0,
-        include_incomplete=False,
+    events,
+    sensor_size,
+    time_window=None,
+    event_count=None,
+    n_time_bins=None,
+    n_event_bins=None,
+    overlap=0.0,
+    include_incomplete=False,
+    start_time=None,
+    end_time=None,
 ):
     """Accumulate events to frames by slicing along constant time (time_window), constant
     number of events (event_count) or constant number of frames (n_time_bins / n_event_bins).
@@ -37,11 +39,11 @@
     assert "x" and "t" and "p" in events.dtype.names
 
     if (
-            not sum(
-                param is not None
-                for param in [time_window, event_count, n_time_bins, n_event_bins]
-            )
-            == 1
+        not sum(
+            param is not None
+            for param in [time_window, event_count, n_time_bins, n_event_bins]
+        )
+        == 1
     ):
         raise ValueError(
             "Please assign a value to exactly one of the parameters time_window,"
@@ -67,7 +69,12 @@
 
     if time_window:
         event_slices = slice_events_by_time(
-            events, time_window, overlap=overlap, include_incomplete=include_incomplete
+            events,
+            time_window,
+            overlap=overlap,
+            include_incomplete=include_incomplete,
+            start_time=start_time,
+            end_time=end_time,
         )
     elif event_count:
         event_slices = slice_events_by_count(
@@ -93,4 +100,3 @@
     for i, event_slice in enumerate(event_slices):
         np.add.at(frames, (i, event_slice["p"].astype(int), event_slice["x"]), 1)
     return frames
-
diff --git a/tonic/slicers.py b/tonic/slicers.py
index dde46b61..d0598c67 100644
--- a/tonic/slicers.py
+++ b/tonic/slicers.py
@@ -71,6 +71,8 @@ class SliceByTime:
     time_window: float
     overlap: float = 0.0
     include_incomplete: bool = False
+    start_time: float = None
+    end_time: float = None
 
     def slice(self, data: np.ndarray, targets: int) -> List[np.ndarray]:
         metadata = self.get_slice_metadata(data, targets)
@@ -83,13 +85,17 @@
         stride = self.time_window - self.overlap
         assert stride > 0
 
+        start_time = t[0] if self.start_time is None else self.start_time
+        end_time = t[-1] if self.end_time is None else self.end_time
+        duration = end_time - start_time
+
         if self.include_incomplete:
-            n_slices = int(np.ceil(((t[-1] - t[0]) - self.time_window) / stride) + 1)
+            n_slices = int(np.ceil((duration - self.time_window) / stride) + 1)
         else:
-            n_slices = int(np.floor(((t[-1] - t[0]) - self.time_window) / stride) + 1)
+            n_slices = int(np.floor((duration - self.time_window) / stride) + 1)
         n_slices = max(n_slices, 1)  # for strides larger than recording time
 
-        window_start_times = np.arange(n_slices) * stride + t[0]
+        window_start_times = np.arange(n_slices) * stride + start_time
         window_end_times = window_start_times + self.time_window
         indices_start = np.searchsorted(t, window_start_times)[:n_slices]
         indices_end = np.searchsorted(t, window_end_times)[:n_slices]
@@ -293,9 +299,15 @@
 def slice_events_by_time(
     events: np.ndarray,
     time_window: int,
     overlap: int = 0,
     include_incomplete: bool = False,
+    start_time=None,
+    end_time=None,
 ):
     return SliceByTime(
-        time_window=time_window, overlap=overlap, include_incomplete=include_incomplete
+        time_window=time_window,
+        overlap=overlap,
+        include_incomplete=include_incomplete,
+        start_time=start_time,
+        end_time=end_time,
     ).slice(events, None)[0]
diff --git a/tonic/transforms.py b/tonic/transforms.py
index 41302a0f..ac9052e5 100644
--- a/tonic/transforms.py
+++ b/tonic/transforms.py
@@ -61,7 +61,7 @@ def __call__(self, events: np.ndarray) -> np.ndarray:
         if type(self.size) == int:
             self.size = [self.size, self.size]
         offsets = (self.sensor_size[0] - self.size[0]) // 2, (
-                self.sensor_size[1] - self.size[1]
+            self.sensor_size[1] - self.size[1]
         ) // 2
         offset_idx = [max(offset, 0) for offset in offsets]
         cropped_events = events[
@@ -69,7 +69,7 @@ def __call__(self, events: np.ndarray) -> np.ndarray:
             & (events["x"] < (offset_idx[0] + self.size[0]))
             & (offset_idx[1] <= events["y"])
             & (events["y"] < (offset_idx[1] + self.size[1]))
-            ]
+        ]
         cropped_events["x"] -= offsets[0]
         cropped_events["y"] -= offsets[1]
         return cropped_events
@@ -229,7 +229,7 @@ class DropPixel:
 
     def __call__(self, events):
         if len(events) == 0:
-            return events # return empty array
+            return events  # return empty array
 
         if events.dtype.names is not None:
             # assert "x", "y", "p" in events.dtype.names
@@ -788,10 +788,10 @@ class NumpyAsType:
 
     def __call__(self, events):
         source_is_structured_array = (
-                hasattr(events.dtype, "names") and events.dtype.names != None
+            hasattr(events.dtype, "names") and events.dtype.names != None
         )
         target_is_structured_array = (
-                hasattr(self.dtype, "names") and self.dtype.names != None
+            hasattr(self.dtype, "names") and self.dtype.names != None
         )
         if source_is_structured_array and not target_is_structured_array:
             return np.lib.recfunctions.structured_to_unstructured(events, self.dtype)
@@ -880,6 +880,10 @@ class ToFrame:
            overlap is defined by the fraction of a bin between 0 and 1.
        include_incomplete (bool): If True, includes overhang slice when time_window or event_count
            is specified. Not valid for bin_count methods.
+       start_time (float): Optional start time if some empty frames are expected in the beginning. If omitted, the
+           start time is the timestamp of the first event for that sample.
+       end_time (float): Optional end time if some empty frames are expected in the end. If omitted, the end time
+           is the timestamp of the last event for that sample.
 
    Example:
        >>> from tonic.transforms import ToFrame
@@ -895,17 +899,35 @@
     n_event_bins: Optional[int] = None
     overlap: float = 0
     include_incomplete: bool = False
+    start_time: Optional[float] = None
+    end_time: Optional[float] = None
 
     def __call__(self, events):
         # if events are empty, return a frame in the expected format
         if len(events) == 0:
             if self.time_window is not None or self.event_count is not None:
-                return np.zeros((1, self.sensor_size[2], self.sensor_size[0], self.sensor_size[1]))
+                return np.zeros(
+                    (1, self.sensor_size[2], self.sensor_size[0], self.sensor_size[1])
+                )
             elif self.n_event_bins is not None:
-                return np.zeros((self.n_event_bins, self.sensor_size[2], self.sensor_size[0], self.sensor_size[1]))
+                return np.zeros(
+                    (
+                        self.n_event_bins,
+                        self.sensor_size[2],
+                        self.sensor_size[0],
+                        self.sensor_size[1],
+                    )
+                )
             elif self.n_time_bins is not None:
-                return np.zeros((self.n_time_bins, self.sensor_size[2], self.sensor_size[0], self.sensor_size[1]))
+                return np.zeros(
+                    (
+                        self.n_time_bins,
+                        self.sensor_size[2],
+                        self.sensor_size[0],
+                        self.sensor_size[1],
+                    )
+                )
             else:
                 raise ValueError("No slicing method specified.")
 
@@ -919,6 +941,8 @@ def __call__(self, events):
             n_event_bins=self.n_event_bins,
             overlap=self.overlap,
             include_incomplete=self.include_incomplete,
+            start_time=self.start_time,
+            end_time=self.end_time,
         )
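
For reference, a short usage sketch of the new arguments (not part of the patch). The sensor size, event dtype and timestamp values below are invented for illustration, and timestamps are assumed to be in microseconds:

    import numpy as np
    from tonic.transforms import ToFrame

    # synthetic, time-sorted events spanning roughly 10 ms to 90 ms
    rng = np.random.default_rng(0)
    dtype = np.dtype([("x", int), ("y", int), ("t", int), ("p", int)])
    events = np.zeros(1000, dtype=dtype)
    events["t"] = np.sort(rng.integers(10_000, 90_000, size=1000))
    events["x"] = rng.integers(0, 128, size=1000)
    events["y"] = rng.integers(0, 128, size=1000)
    events["p"] = rng.integers(0, 2, size=1000)

    # Fixing the sliced range to 0-100 ms with a 10 ms window yields 10 frames,
    # including empty frames before the first and after the last event.
    to_frame = ToFrame(
        sensor_size=(128, 128, 2),
        time_window=10_000,
        start_time=0,
        end_time=100_000,
    )
    frames = to_frame(events)  # 10 frames covering the padded 0-100 ms range

Leaving start_time and end_time at None keeps the previous behaviour of slicing from the first to the last event timestamp.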