Commit 5f6778b
* Synchronize master and develop workflows (#236)
  * Synchronize master and develop workflows
  * comment
* Added OpenDR citation (#238)
  * Added OpenDR citation
  * Update README.md
* Fixes bibtex name (#241)
  Fixes citation name
* Fix clang (#250)
* Integration of heart anomaly detection self-attention neural bag of features (#246)
  * added sanbof models
  * added attention models to ci test
  Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* Make `test release` docker target the specific branch when the label is run manually (#252)
  * Fix
  * Better approach
  * Fix
* Update CODEOWNERS (#253)

Co-authored-by: Nikolaos Passalis <passalis@users.noreply.github.com>
Co-authored-by: Kateryna Chumachenko <katerynaCh@users.noreply.github.com>
Co-authored-by: Stefania Pedrazzi <stefaniapedrazzi@users.noreply.github.com>
1 parent: 00a9fb4
Showing 13 changed files with 227 additions and 58 deletions.
.../perception/heart_anomaly_detection/attention_neural_bag_of_feature/algorithm/samodels.py
63 changes: 63 additions & 0 deletions
import torch
import torch.nn as nn
import torch.nn.functional as F


class SelfAttention(nn.Module):
    def __init__(self, n_codeword, series_length, att_type):
        super(SelfAttention, self).__init__()

        assert att_type in ['spatialsa', 'temporalsa', 'spatiotemporal']

        self.att_type = att_type
        self.hidden_dim = 128

        self.n_codeword = n_codeword
        self.series_length = series_length

        # query/key projections; which axis is projected depends on the attention type
        if self.att_type == 'spatiotemporal':
            self.w_s = nn.Linear(n_codeword, self.hidden_dim)
            self.w_t = nn.Linear(series_length, self.hidden_dim)
        elif self.att_type == 'spatialsa':
            self.w_1 = nn.Linear(series_length, self.hidden_dim)
            self.w_2 = nn.Linear(series_length, self.hidden_dim)
        elif self.att_type == 'temporalsa':
            self.w_1 = nn.Linear(n_codeword, self.hidden_dim)
            self.w_2 = nn.Linear(n_codeword, self.hidden_dim)
        self.drop = nn.Dropout(0.2)
        # learnable gate that blends the input with its attended version
        self.alpha = nn.Parameter(data=torch.Tensor(1), requires_grad=True)

    def forward(self, x):
        # dimension order of x: batch_size, in_channels, series_length

        # clip the value of alpha to [0, 1]
        with torch.no_grad():
            self.alpha.copy_(torch.clip(self.alpha, 0.0, 1.0))

        if self.att_type == 'spatiotemporal':
            # temporal queries against spatial keys, used as an element-wise sigmoid gate
            q = self.w_t(x)
            x_s = x.transpose(-1, -2)
            k = self.w_s(x_s)
            qkt = q @ k.transpose(-2, -1) * (self.hidden_dim ** -0.5)
            mask = torch.sigmoid(qkt)
            x = x * self.alpha + (1.0 - self.alpha) * x * mask

        elif self.att_type == 'temporalsa':
            # scaled dot-product attention across time steps
            x1 = x.transpose(-1, -2)
            q = self.w_1(x1)
            k = self.w_2(x1)
            mask = F.softmax(q @ k.transpose(-2, -1) * (self.hidden_dim ** -0.5), dim=-1)
            mask = self.drop(mask)
            temp = mask @ x1
            x1 = x1 * self.alpha + (1.0 - self.alpha) * temp
            x = x1.transpose(-2, -1)

        elif self.att_type == 'spatialsa':
            # scaled dot-product attention across the codeword (channel) axis
            q = self.w_1(x)
            k = self.w_2(x)
            mask = F.softmax(q @ k.transpose(-2, -1) * (self.hidden_dim ** -0.5), dim=-1)
            mask = self.drop(mask)
            temp = mask @ x
            x = x * self.alpha + (1.0 - self.alpha) * temp

        return x
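For reviewers who want to sanity-check the new module, a minimal usage sketch follows. It is not part of the commit: the import path and the sizes (batch of 8, 64 codewords, series length of 30) are illustrative assumptions. Each attention variant should preserve the input shape.

import torch
from samodels import SelfAttention  # hypothetical import path; adjust to your checkout

n_codeword, series_length = 64, 30             # illustrative sizes, not from the commit
x = torch.randn(8, n_codeword, series_length)  # batch of 8 dummy series

for att_type in ['spatialsa', 'temporalsa', 'spatiotemporal']:
    att = SelfAttention(n_codeword, series_length, att_type)
    y = att(x)
    # every variant attends or gates in place, so the output shape matches the input
    assert y.shape == x.shape

Because alpha is clamped to [0, 1] in forward, the update x * alpha + (1 - alpha) * attended stays a convex combination of the input and its attended version.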