Skip to content
This repository has been archived by the owner on Sep 18, 2024. It is now read-only.

Commit

Permalink
fix: overfit test (#137)
Browse files Browse the repository at this point in the history
* fix: overfit test

I realized I had put the wrong sign in the overfit test.
I will also change its margin to 0.5.

* fix: keras euclidean triplet loss

* fix: reduce distance margin

* fix: possible NaNs induced by sqrt
  • Loading branch information
Tadej Svetina authored Oct 18, 2021
1 parent beb5f3a commit 0285280
Show file tree
Hide file tree
Showing 4 changed files with 13 additions and 7 deletions.
8 changes: 7 additions & 1 deletion finetuner/tuner/keras/losses.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,13 @@ def call(self, inputs, **kwargs):
# https://github.com/tensorflow/tensorflow/issues/12071
dist_pos = tf.reduce_sum(tf.math.squared_difference(anchor, positive), axis=-1)
dist_neg = tf.reduce_sum(tf.math.squared_difference(anchor, negative), axis=-1)
return tf.reduce_mean(tf.nn.relu(dist_pos - dist_neg + self._margin))

dist_pos = tf.maximum(dist_pos, 1e-9)
dist_neg = tf.maximum(dist_neg, 1e-9)

return tf.reduce_mean(
tf.nn.relu(tf.sqrt(dist_pos) - tf.sqrt(dist_neg) + self._margin)
)


class CosineTripletLoss(BaseLoss, Layer):
Expand Down
4 changes: 2 additions & 2 deletions tests/integration/keras/test_overfit.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ def test_overfit_keras(
vec_embedings = embed_model(vecs).numpy()

# Compute distances between embeddings
metric = 'sqeuclidean' if loss.startswith('Euclidean') else 'cosine'
metric = 'euclidean' if loss.startswith('Euclidean') else 'cosine'
dists = squareform(pdist(vec_embedings, metric=metric))

# Make sure that for each class, the two instances are closer than
Expand All @@ -62,4 +62,4 @@ def test_overfit_keras(
dist_other = dists[2 * i : 2 * i + 2, :].copy()
dist_other[:, 2 * i : 2 * i + 2] = 10_000

assert cls_dist < dist_other.min() + 1
assert cls_dist < dist_other.min() - 0.1
4 changes: 2 additions & 2 deletions tests/integration/paddle/test_overfit.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ def test_overfit_paddle(
vec_embedings = embed_model(paddle.Tensor(vecs)).numpy()

# Compute distances between embeddings
metric = 'sqeuclidean' if 'Euclidean' in loss else 'cosine'
metric = 'euclidean' if 'Euclidean' in loss else 'cosine'
dists = squareform(pdist(vec_embedings, metric=metric))

# Make sure that for each class, the two instances are closer than
Expand All @@ -64,4 +64,4 @@ def test_overfit_paddle(
dist_other = dists[2 * i : 2 * i + 2, :].copy()
dist_other[:, 2 * i : 2 * i + 2] = 10_000

assert cls_dist < dist_other.min() + 1
assert cls_dist < dist_other.min() - 0.1
4 changes: 2 additions & 2 deletions tests/integration/torch/test_overfit.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ def test_overfit_pytorch(
vec_embedings = embed_model(torch.Tensor(vecs)).numpy()

# Compute distances between embeddings
metric = 'sqeuclidean' if 'Euclidean' in loss else 'cosine'
metric = 'euclidean' if 'Euclidean' in loss else 'cosine'
dists = squareform(pdist(vec_embedings, metric=metric))

# Make sure that for each class, the two instances are closer than
Expand All @@ -64,4 +64,4 @@ def test_overfit_pytorch(
dist_other = dists[2 * i : 2 * i + 2, :].copy()
dist_other[:, 2 * i : 2 * i + 2] = 10_000

assert cls_dist < dist_other.min() + 1
assert cls_dist < dist_other.min() - 0.1

0 comments on commit 0285280

Please sign in to comment.