model_sub_Att.py
import torch
import torch.nn as nn
import torch.nn.functional as F

from layers_sub_Att import SpGraphAttentionLayer


class ADN_SpGAT(nn.Module):
    """Sparse version of GAT."""

    def __init__(self, nfeat, nhid, dropout, alpha, nheads):
        super(ADN_SpGAT, self).__init__()
        self.dropout = dropout
        # One sparse attention head per `nheads`; concat=False so each head
        # returns an (N, nhid) node embedding rather than concatenated features.
        self.attentions = [SpGraphAttentionLayer(nfeat,
                                                 nhid,
                                                 dropout=dropout,
                                                 alpha=alpha,
                                                 concat=False)
                           for _ in range(nheads)]
        # Register each head as a submodule so its parameters are tracked.
        for i, attention in enumerate(self.attentions):
            self.add_module('attention_{}'.format(i), attention)

    def forward(self, x, adj, no_need_param):
        # `no_need_param` is unused; it is kept for interface compatibility.
        # Each head returns (embedding, W); concatenate the head embeddings.
        neighborhood_emb = torch.cat([encoder(x, adj)[0] for encoder in self.attentions], dim=1)
        # The subtraction below only lines up shape-wise for a single head.
        assert len(self.attentions) == 1
        encoder_1 = self.attentions[0]
        # Reuse the head's mapping matrix W to project the raw features.
        # (Note: this runs the head a second time on the same inputs.)
        map_fun_W = encoder_1(x, adj)[1]
        mapped_fea = torch.mm(x, map_fun_W)
        # Subtract the aggregated neighborhood embedding from the node's own
        # mapped features, then apply a nonlinearity.
        new_emb_mat = F.elu(mapped_fea - neighborhood_emb)
        return new_emb_mat
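
For reference, a minimal usage sketch. The adjacency format and the (embedding, W) return convention are assumptions inherited from layers_sub_Att, which is not shown here; the shapes below follow from the torch.mm and subtraction in forward.

# Minimal usage sketch (assumes SpGraphAttentionLayer accepts a sparse
# adjacency tensor and returns an (embedding, weight-matrix) pair).
model = ADN_SpGAT(nfeat=16, nhid=16, dropout=0.5, alpha=0.2, nheads=1)  # nheads must be 1 (see assert)
x = torch.randn(100, 16)           # 100 nodes, 16 input features
adj = torch.eye(100).to_sparse()   # placeholder adjacency; use the real graph here
emb = model(x, adj, None)          # third argument is ignored
print(emb.shape)                   # (100, 16): one nhid-dim embedding per node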