
Commit 4f020a3

resolve conflicts. Delete unused code
1 parent 88733c9 commit 4f020a3

2 files changed, 0 insertions(+), 46 deletions(-)


src/otx/algo/object_detection_3d/heads/depthaware_transformer.py (-25 lines)
@@ -4,7 +4,6 @@
 """depth aware transformer head for 3d object detection."""
 from __future__ import annotations
 
-import math
 from typing import Any, Callable, ClassVar
 
 import torch
@@ -101,30 +100,6 @@ def _reset_parameters(self) -> None:
         constant_(self.reference_points.bias.data, 0.0)
         normal_(self.level_embed)
 
-    def get_proposal_pos_embed(self, proposals: Tensor) -> Tensor:
-        """Generate position embeddings for proposal tensor.
-
-        Args:
-            proposals (Tensor): Proposal tensor of shape (N, L, 6).
-
-        TODO (Kirill): Not used. Remove this function?
-
-        Returns:
-            Tensor: Position embeddings for proposal tensor of shape (N, L, embedding_dim).
-        """
-        num_pos_feats = 128
-        temperature = 10000
-        scale = 2 * math.pi
-
-        dim_t = torch.arange(num_pos_feats, dtype=torch.float32, device=proposals.device)
-        dim_t = temperature ** (2 * (dim_t // 2) / num_pos_feats)
-        # N, L, 6
-        proposals = proposals.sigmoid() * scale
-        # N, L, 6, 128
-        pos = proposals[:, :, :, None] / dim_t
-        # N, L, 6, 64, 2
-        return torch.stack((pos[:, :, :, 0::2].sin(), pos[:, :, :, 1::2].cos()), dim=4).flatten(2)
-
     def get_valid_ratio(self, mask: Tensor) -> Tensor:
         """Calculate the valid ratio of the mask.
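For reference, the deleted helper implemented a standard sinusoidal position encoding: each of the 6 proposal coordinates is squashed to [0, 2*pi) via a sigmoid and expanded into 128 interleaved sine/cosine features, giving 6 x 128 = 768 output channels. A minimal standalone sketch of the same computation (the name sine_proposal_pos_embed is illustrative, not from the codebase):

    import math

    import torch
    from torch import Tensor


    def sine_proposal_pos_embed(proposals: Tensor, num_pos_feats: int = 128, temperature: int = 10000) -> Tensor:
        """Sinusoidal embedding: (N, L, 6) proposals -> (N, L, 6 * num_pos_feats)."""
        scale = 2 * math.pi
        dim_t = torch.arange(num_pos_feats, dtype=torch.float32, device=proposals.device)
        dim_t = temperature ** (2 * (dim_t // 2) / num_pos_feats)  # geometric ladder of frequencies
        proposals = proposals.sigmoid() * scale  # map coordinates into [0, 2*pi): (N, L, 6)
        pos = proposals[:, :, :, None] / dim_t   # broadcast over frequencies: (N, L, 6, 128)
        # even indices -> sin, odd -> cos; stack gives (N, L, 6, 64, 2), flatten(2) -> (N, L, 768)
        return torch.stack((pos[..., 0::2].sin(), pos[..., 1::2].cos()), dim=4).flatten(2)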

tests/unit/algo/object_detection_3d/heads/test_depthaware_transformer.py (-21 lines)
@@ -7,7 +7,6 @@
 import torch
 from otx.algo.object_detection_3d.heads.depthaware_transformer import (
     DepthAwareTransformerBuilder,
-    gen_sineembed_for_position,
 )
 
 
@@ -57,16 +56,6 @@ def test_depth_aware_transformer_forward(self, depth_aware_transformer):
         assert output[2].shape == (1, 550, 2)
         assert output[4] is None
 
-    def test_depth_aware_transformer_get_proposal_pos_embed(self, depth_aware_transformer):
-        # Create dummy input tensor
-        proposals = torch.randn(2, 10, 6)
-
-        # Get proposal position embeddings
-        pos_embed = depth_aware_transformer.get_proposal_pos_embed(proposals)
-
-        # Check output shape
-        assert pos_embed.shape == (2, 10, 768)
-
     def test_depth_aware_transformer_get_valid_ratio(self, depth_aware_transformer):
         # Create dummy input tensor
         mask = torch.randn(2, 32, 32) > 0
@@ -76,13 +65,3 @@ def test_depth_aware_transformer_get_valid_ratio(self, depth_aware_transformer):
 
         # Check output shape
         assert valid_ratio.shape == (2, 2)
-
-    def test_gen_sineembed_for_position(self):
-        # Create dummy input tensor
-        pos_tensor = torch.randn(2, 4, 6)
-
-        # Generate sine embeddings for position tensor
-        pos_embed = gen_sineembed_for_position(pos_tensor)
-
-        # Check output shape
-        assert pos_embed.shape == (2, 4, 768)
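The deleted shape assertions follow from the 6 x 128 = 768 arithmetic above; a quick check against the illustrative sine_proposal_pos_embed sketch, using the same dummy input as the removed test:

    proposals = torch.randn(2, 10, 6)
    pos_embed = sine_proposal_pos_embed(proposals)
    assert pos_embed.shape == (2, 10, 768)  # 6 coordinates x 128 sin/cos features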
