add missing ensure_spacing tests
* was previously only being tested indirectly via tests for peak_local_max
grlee77 committed Apr 27, 2024
1 parent 2c6a1cb commit 27a1544
Showing 2 changed files with 100 additions and 0 deletions.
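For context: ensure_spacing greedily keeps a subset of the input coordinates so that no two retained points are closer than the requested spacing under the chosen p-norm. Below is a minimal sketch of that behavior, with hypothetical values, assuming cucim's ensure_spacing keeps the first point it encounters and drops later points that fall within the spacing (mirroring scikit-image's semantics):

    import cupy as cp
    from cucim.skimage._shared.coord import ensure_spacing

    coords = cp.asarray([[0.0, 0.0], [0.0, 0.5], [0.0, 2.0]])
    out = ensure_spacing(coords, spacing=1.0)
    # [0, 0.5] lies within spacing of the already-kept [0, 0], so only
    # [[0, 0], [0, 2]] should remain.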
2 changes: 2 additions & 0 deletions python/cucim/src/cucim/skimage/_shared/coord.py
@@ -106,6 +106,8 @@ def ensure_spacing(
     if len(coords):
         coords = cp.atleast_2d(coords)
         coords = cp.asnumpy(coords)
+        if not np.isscalar(spacing):
+            spacing = cp.asnumpy(spacing)
         if min_split_size is None:
             batch_list = [coords]
         else:
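The two added lines guard against spacing arriving as a device array rather than a Python scalar: the line just above converts the coordinates to NumPy for host-side processing, so a CuPy spacing must likewise be transferred with cp.asnumpy. A minimal, hypothetical illustration of the call this enables (the new test_ensure_spacing_trivial below exercises the same path):

    import cupy as cp
    from cucim.skimage._shared.coord import ensure_spacing

    coords = cp.random.randn(100, 2)
    spacing = cp.asarray(0.75)  # 0-d CuPy array, not a Python scalar
    # Without the conversion above, a device-resident spacing could reach
    # the host-side distance checks unconverted; with it, this call is safe.
    out = ensure_spacing(coords, spacing=spacing)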
98 changes: 98 additions & 0 deletions python/cucim/src/cucim/skimage/_shared/tests/test_coord.py
@@ -0,0 +1,98 @@
import time

import cupy as cp
import numpy as np
import pytest
from scipy.spatial.distance import minkowski, pdist

from cucim.skimage._shared.coord import ensure_spacing


@pytest.mark.parametrize("p", [1, 2, np.inf])
@pytest.mark.parametrize("size", [30, 50, None])
def test_ensure_spacing_trivial(p, size):
    # --- Empty input
    assert ensure_spacing(cp.asarray([]), p_norm=p).size == 0

    # --- A single point
    coord = cp.random.randn(1, 2)
    assert cp.array_equal(
        coord, ensure_spacing(coord, p_norm=p, min_split_size=size)
    )

    # --- Verified spacing
    coord = cp.random.randn(100, 2)

    # --- 0 spacing
    assert cp.array_equal(
        coord, ensure_spacing(coord, spacing=0, p_norm=p, min_split_size=size)
    )

    # Spacing is chosen to be half the minimum pairwise distance
    coord_cpu = cp.asnumpy(coord)
    spacing = cp.asarray(pdist(coord_cpu, metric=minkowski, p=p).min() * 0.5)

    out = ensure_spacing(coord, spacing=spacing, p_norm=p, min_split_size=size)

    assert cp.array_equal(coord, out)


@pytest.mark.parametrize("ndim", [1, 2, 3, 4, 5])
@pytest.mark.parametrize("size", [2, 10, None])
def test_ensure_spacing_nD(ndim, size):
    coord = cp.ones((5, ndim))

    expected = cp.ones((1, ndim))

    assert cp.array_equal(ensure_spacing(coord, min_split_size=size), expected)


@pytest.mark.parametrize("p", [1, 2, np.inf])
@pytest.mark.parametrize("size", [50, 100, None])
def test_ensure_spacing_batch_processing(p, size):
    coord_cpu = np.random.randn(100, 2)

    # --- Use the median pairwise distance between the points as the spacing
    spacing = cp.asarray(np.median(pdist(coord_cpu, metric=minkowski, p=p)))
    coord = cp.asarray(coord_cpu)

    expected = ensure_spacing(coord, spacing=spacing, p_norm=p)

    cp.testing.assert_array_equal(
        ensure_spacing(coord, spacing=spacing, p_norm=p, min_split_size=size),
        expected,
    )


def test_max_batch_size():
"""Small batches are slow, large batches -> large allocations -> also slow.
https://github.com/scikit-image/scikit-image/pull/6035#discussion_r751518691
"""
coords = cp.random.randint(low=0, high=1848, size=(40000, 2))
tstart = time.time()
ensure_spacing(coords, spacing=100, min_split_size=50, max_split_size=2000)
dur1 = time.time() - tstart

tstart = time.time()
ensure_spacing(coords, spacing=100, min_split_size=50, max_split_size=20000)
dur2 = time.time() - tstart

# Originally checked dur1 < dur2 to assert that the default batch size was
# faster than a much larger batch size. However, on rare occasion a CI test
# case would fail with dur1 ~5% larger than dur2. To be more robust to
# variable load or differences across architectures, we relax this here.
assert dur1 < 1.33 * dur2


@pytest.mark.parametrize("p", [1, 2, np.inf])
@pytest.mark.parametrize("size", [30, 50, None])
def test_ensure_spacing_p_norm(p, size):
    coord_cpu = np.random.randn(100, 2)

    # --- Use the median pairwise distance between the points as the spacing
    spacing = cp.asarray(np.median(pdist(coord_cpu, metric=minkowski, p=p)))
    coord = cp.asarray(coord_cpu)
    out = ensure_spacing(coord, spacing=spacing, p_norm=p, min_split_size=size)

    assert pdist(cp.asnumpy(out), metric=minkowski, p=p).min() > spacing
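
As the commit message notes, these paths were previously exercised only indirectly through peak_local_max, which prunes peaks closer than min_distance via ensure_spacing. A rough sketch of that indirect route, with hypothetical values, assuming cucim.skimage.feature.peak_local_max mirrors the scikit-image API:

    import cupy as cp
    from cucim.skimage.feature import peak_local_max

    image = cp.zeros((20, 20))
    image[4, 4] = image[4, 6] = image[12, 12] = 1.0
    # Peaks closer than min_distance are pruned through ensure_spacing,
    # so of the two nearby maxima only one should survive.
    peaks = peak_local_max(image, min_distance=3)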
