add start_time and end_time to ToFrame
biphasic committed Jul 17, 2024
1 parent 6cc95a2 commit 7bb2384
Showing 3 changed files with 69 additions and 27 deletions.
36 changes: 21 additions & 15 deletions tonic/functional/to_frame.py
@@ -9,14 +9,16 @@


def to_frame_numpy(
    events,
    sensor_size,
    time_window=None,
    event_count=None,
    n_time_bins=None,
    n_event_bins=None,
    overlap=0.0,
    include_incomplete=False,
+    start_time=None,
+    end_time=None,
):
"""Accumulate events to frames by slicing along constant time (time_window), constant number of
events (event_count) or constant number of frames (n_time_bins / n_event_bins).
@@ -37,11 +39,11 @@ def to_frame_numpy(
assert "x" and "t" and "p" in events.dtype.names

    if (
        not sum(
            param is not None
            for param in [time_window, event_count, n_time_bins, n_event_bins]
        )
        == 1
    ):
raise ValueError(
"Please assign a value to exactly one of the parameters time_window,"
@@ -67,7 +69,12 @@

    if time_window:
        event_slices = slice_events_by_time(
-            events, time_window, overlap=overlap, include_incomplete=include_incomplete
+            events,
+            time_window,
+            overlap=overlap,
+            include_incomplete=include_incomplete,
+            start_time=start_time,
+            end_time=end_time,
        )
elif event_count:
event_slices = slice_events_by_count(
@@ -93,4 +100,3 @@
for i, event_slice in enumerate(event_slices):
np.add.at(frames, (i, event_slice["p"].astype(int), event_slice["x"]), 1)
return frames
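A minimal sketch of how the new parameters can be used with the functional API. The toy event array, dtype and sensor_size below are made up for illustration, and the import path is assumed:

import numpy as np
from tonic.functional import to_frame_numpy  # import path assumed

# Toy events spanning t = 2000..4900 us; any structured array with "x", "t", "p"
# (and "y" for 2D sensors) fields works, per the assert above.
dtype = np.dtype([("x", int), ("y", int), ("t", int), ("p", int)])
events = np.zeros(30, dtype=dtype)
events["t"] = np.arange(2000, 5000, 100)
events["x"] = np.arange(30) % 4
events["p"] = np.arange(30) % 2

# Without start_time/end_time the windows begin at the first event (t=2000) and
# stop at the last one. Forcing the bounds to [0, 6000) instead yields
# floor((6000 - 0 - 1000) / 1000) + 1 = 6 frames, with empty frames at both ends.
frames = to_frame_numpy(
    events, sensor_size=(4, 4, 2), time_window=1000, start_time=0, end_time=6000
)
print(len(frames))  # expected: 6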

20 changes: 16 additions & 4 deletions tonic/slicers.py
@@ -71,6 +71,8 @@ class SliceByTime:
    time_window: float
    overlap: float = 0.0
    include_incomplete: bool = False
+    start_time: float = None
+    end_time: float = None

def slice(self, data: np.ndarray, targets: int) -> List[np.ndarray]:
metadata = self.get_slice_metadata(data, targets)
@@ -83,13 +85,17 @@ def get_slice_metadata(
        stride = self.time_window - self.overlap
        assert stride > 0

+        start_time = t[0] if self.start_time is None else self.start_time
+        end_time = t[-1] if self.end_time is None else self.end_time
+        duration = end_time - start_time
+
        if self.include_incomplete:
-            n_slices = int(np.ceil(((t[-1] - t[0]) - self.time_window) / stride) + 1)
+            n_slices = int(np.ceil((duration - self.time_window) / stride) + 1)
        else:
-            n_slices = int(np.floor(((t[-1] - t[0]) - self.time_window) / stride) + 1)
+            n_slices = int(np.floor((duration - self.time_window) / stride) + 1)
        n_slices = max(n_slices, 1)  # for strides larger than recording time

-        window_start_times = np.arange(n_slices) * stride + t[0]
+        window_start_times = np.arange(n_slices) * stride + start_time
        window_end_times = window_start_times + self.time_window
        indices_start = np.searchsorted(t, window_start_times)[:n_slices]
        indices_end = np.searchsorted(t, window_end_times)[:n_slices]
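A worked example of the window arithmetic above, using hypothetical numbers (time_window=1000, no overlap, events spanning t = 2000..4900, bounds forced to [0, 6000]):

import numpy as np

t = np.arange(2000, 5000, 100)        # event timestamps
time_window, overlap = 1000, 0.0
start_time, end_time = 0, 6000        # would default to t[0] / t[-1] if left as None
stride = time_window - overlap
duration = end_time - start_time      # 6000 instead of t[-1] - t[0] = 2900

n_slices = int(np.floor((duration - time_window) / stride) + 1)   # 6
window_start_times = np.arange(n_slices) * stride + start_time    # 0, 1000, ..., 5000
window_end_times = window_start_times + time_window               # 1000, 2000, ..., 6000
indices_start = np.searchsorted(t, window_start_times)            # [0, 0, 0, 10, 20, 30]
indices_end = np.searchsorted(t, window_end_times)                 # [0, 0, 10, 20, 30, 30]
# The first two windows and the last one select no events, giving empty slices/frames.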
@@ -293,9 +299,15 @@ def slice_events_by_time(
    time_window: int,
    overlap: int = 0,
    include_incomplete: bool = False,
+    start_time=None,
+    end_time=None,
):
    return SliceByTime(
-        time_window=time_window, overlap=overlap, include_incomplete=include_incomplete
+        time_window=time_window,
+        overlap=overlap,
+        include_incomplete=include_incomplete,
+        start_time=start_time,
+        end_time=end_time,
    ).slice(events, None)[0]
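As a usage sketch, the wrapper simply forwards the new bounds (using the toy events from the sketch above; import path taken from this file):

from tonic.slicers import slice_events_by_time

slices = slice_events_by_time(events, time_window=1000, start_time=0, end_time=6000)
print(len(slices), [len(s) for s in slices])  # 6 slices; the first two and the last are empty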


40 changes: 32 additions & 8 deletions tonic/transforms.py
@@ -61,15 +61,15 @@ def __call__(self, events: np.ndarray) -> np.ndarray:
        if type(self.size) == int:
            self.size = [self.size, self.size]
        offsets = (self.sensor_size[0] - self.size[0]) // 2, (
            self.sensor_size[1] - self.size[1]
        ) // 2
        offset_idx = [max(offset, 0) for offset in offsets]
        cropped_events = events[
            (offset_idx[0] <= events["x"])
            & (events["x"] < (offset_idx[0] + self.size[0]))
            & (offset_idx[1] <= events["y"])
            & (events["y"] < (offset_idx[1] + self.size[1]))
        ]
        cropped_events["x"] -= offsets[0]
        cropped_events["y"] -= offsets[1]
        return cropped_events
@@ -229,7 +229,7 @@ class DropPixel:

    def __call__(self, events):
        if len(events) == 0:
            return events  # return empty array

if events.dtype.names is not None:
# assert "x", "y", "p" in events.dtype.names
Expand Down Expand Up @@ -788,10 +788,10 @@ class NumpyAsType:

def __call__(self, events):
source_is_structured_array = (
hasattr(events.dtype, "names") and events.dtype.names != None
hasattr(events.dtype, "names") and events.dtype.names != None
)
target_is_structured_array = (
hasattr(self.dtype, "names") and self.dtype.names != None
hasattr(self.dtype, "names") and self.dtype.names != None
)
if source_is_structured_array and not target_is_structured_array:
return np.lib.recfunctions.structured_to_unstructured(events, self.dtype)
@@ -880,6 +880,10 @@ class ToFrame:
            overlap is defined by the fraction of a bin between 0 and 1.
        include_incomplete (bool): If True, includes overhang slice when time_window or event_count is specified.
            Not valid for bin_count methods.
+        start_time (float): Optional start time if some empty frames are expected in the beginning. If omitted, the
+            start time is the timestamp of the first event for that sample.
+        end_time (float): Optional end time if some empty frames are expected in the end. If omitted, the end time
+            is the timestamp of the last event for that sample.
Example:
>>> from tonic.transforms import ToFrame
@@ -895,17 +899,35 @@
    n_event_bins: Optional[int] = None
    overlap: float = 0
    include_incomplete: bool = False
+    start_time: Optional[float] = None
+    end_time: Optional[float] = None

    def __call__(self, events):

        # if events are empty, return a frame in the expected format
        if len(events) == 0:
            if self.time_window is not None or self.event_count is not None:
                return np.zeros(
                    (1, self.sensor_size[2], self.sensor_size[0], self.sensor_size[1])
                )
            elif self.n_event_bins is not None:
                return np.zeros(
                    (
                        self.n_event_bins,
                        self.sensor_size[2],
                        self.sensor_size[0],
                        self.sensor_size[1],
                    )
                )
            elif self.n_time_bins is not None:
                return np.zeros(
                    (
                        self.n_time_bins,
                        self.sensor_size[2],
                        self.sensor_size[0],
                        self.sensor_size[1],
                    )
                )
            else:
                raise ValueError("No slicing method specified.")

@@ -919,6 +941,8 @@ def __call__(self, events):
            n_event_bins=self.n_event_bins,
            overlap=self.overlap,
            include_incomplete=self.include_incomplete,
+            start_time=self.start_time,
+            end_time=self.end_time,
        )
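And the same behaviour through the transform, roughly (sensor_size and bounds are the illustrative values used earlier; the import path is the one shown in the docstring example):

from tonic.transforms import ToFrame

transform = ToFrame(
    sensor_size=(4, 4, 2),
    time_window=1000,
    start_time=0,      # pad with empty frames before the first event
    end_time=6000,     # pad with empty frames after the last event
)
frames = transform(events)
print(frames.shape[0])  # 6 frames instead of the 2 covered by the events alone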


Expand Down
