Add assortativity to torch_geometric.utils #5587

Merged · 13 commits · Oct 5, 2022
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -5,6 +5,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).

## [2.2.0] - 2022-MM-DD
### Added
- Added `assortativity` that computes the degree assortativity coefficient ([#5587](https://github.com/pyg-team/pytorch_geometric/pull/5587))
- Added `SSGConv` layer ([#5599](https://github.com/pyg-team/pytorch_geometric/pull/5599))
- Added `shuffle_node`, `mask_feature` and `add_random_edge` augmentation methods ([#5548](https://github.com/pyg-team/pytorch_geometric/pull/5548))
- Added `dropout_path` augmentation that drops edges from a graph based on random walks ([#5531](https://github.com/pyg-team/pytorch_geometric/pull/5531))
18 changes: 18 additions & 0 deletions test/utils/test_assortativity.py
@@ -0,0 +1,18 @@
import pytest
import torch

from torch_geometric.utils import assortativity


def test_assortativity():
    # completely assortative graph
    edge_index = torch.tensor([[0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 5],
                               [1, 2, 3, 0, 2, 3, 0, 1, 3, 0, 1, 2, 5, 4]])
    out = assortativity(edge_index)
    assert pytest.approx(out, abs=1e-5) == 1.0

    # completely disassortative graph
    edge_index = torch.tensor([[0, 1, 2, 3, 4, 5, 5, 5, 5, 5],
                               [5, 5, 5, 5, 5, 0, 1, 2, 3, 4]])
    out = assortativity(edge_index)
    assert pytest.approx(out, abs=1e-5) == -1.0
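A possible extra sanity check, not part of the PR, is to compare against NetworkX's reference implementation. The sketch below assumes `networkx` is installed; the test name `test_assortativity_against_networkx` is hypothetical.

```python
import networkx as nx
import torch

from torch_geometric.utils import assortativity


def test_assortativity_against_networkx():
    # Star graph: hub node 5 connected to leaves 0-4 (both edge directions).
    edge_index = torch.tensor([[0, 1, 2, 3, 4, 5, 5, 5, 5, 5],
                               [5, 5, 5, 5, 5, 0, 1, 2, 3, 4]])
    G = nx.Graph(edge_index.t().tolist())
    expected = nx.degree_assortativity_coefficient(G)  # -1.0 for a star graph
    assert abs(assortativity(edge_index) - expected) < 1e-5
```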
4 changes: 3 additions & 1 deletion torch_geometric/utils/__init__.py
@@ -1,6 +1,7 @@
from .degree import degree
from .softmax import softmax
from .dropout import dropout_adj, dropout_node, dropout_edge, dropout_path
from .augmentation import shuffle_node, mask_feature, add_random_edge
from .sort_edge_index import sort_edge_index
from .coalesce import coalesce
from .undirected import is_undirected, to_undirected
@@ -11,6 +12,7 @@
from .subgraph import (get_num_hops, subgraph, k_hop_subgraph,
                       bipartite_subgraph)
from .homophily import homophily
from .assortativity import assortativity
from .get_laplacian import get_laplacian
from .get_mesh_laplacian import get_mesh_laplacian
from .mask import index_to_mask, mask_to_index
@@ -34,7 +36,6 @@
structured_negative_sampling_feasible)
from .train_test_split_edges import train_test_split_edges
from .scatter import scatter
from .augmentation import shuffle_node, mask_feature, add_random_edge

__all__ = [
    'degree',
@@ -63,6 +64,7 @@
    'bipartite_subgraph',
    'k_hop_subgraph',
    'homophily',
    'assortativity',
    'get_laplacian',
    'get_mesh_laplacian',
    'index_to_mask',
66 changes: 66 additions & 0 deletions torch_geometric/utils/assortativity.py
@@ -0,0 +1,66 @@
import torch
from torch_sparse import SparseTensor

from torch_geometric.typing import Adj
from torch_geometric.utils import coalesce, degree

from .to_dense_adj import to_dense_adj


def assortativity(edge_index: Adj) -> float:
    r"""The degree assortativity coefficient from the
    `"Mixing patterns in networks"
    <https://arxiv.org/abs/cond-mat/0209450>`_ paper.
    Assortativity in a network refers to the tendency of nodes to
    connect with other similar nodes over dissimilar nodes.
    It is computed from the Pearson correlation coefficient of the node
    degrees.

    Args:
        edge_index (Tensor or SparseTensor): The graph connectivity.

    Returns:
        The value of the degree assortativity coefficient for the input
        graph :math:`\in [-1, 1]`.

    Example:

        >>> edge_index = torch.tensor([[0, 1, 2, 3, 2],
        ...                            [1, 2, 0, 1, 3]])
        >>> assortativity(edge_index)
        -0.666667640209198
    """
    if isinstance(edge_index, SparseTensor):
        row, col, _ = edge_index.coo()
    else:
        row, col = edge_index

    device = row.device
    out_deg = degree(row, dtype=torch.long)
    in_deg = degree(col, dtype=torch.long)
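    # Map each distinct degree value to a compact index, so that the degree
    # mixing matrix below is indexed by distinct degrees rather than by raw
    # degree values.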
    degrees = torch.unique(torch.cat([out_deg, in_deg]))
    mapping = row.new_zeros(degrees.max().item() + 1)
    mapping[degrees] = torch.arange(degrees.size(0), device=device)

    # Compute degree mixing matrix (joint probability distribution) `M`
    num_degrees = degrees.size(0)
    src_deg = mapping[out_deg[row]]
    dst_deg = mapping[in_deg[col]]

    pairs = torch.stack([src_deg, dst_deg], dim=0)
    occurrence = torch.ones(pairs.size(1), device=device)
    pairs, occurrence = coalesce(pairs, occurrence)
    M = to_dense_adj(pairs, edge_attr=occurrence, max_num_nodes=num_degrees)[0]
    # Normalization
    M /= M.sum()

    # Numeric assortativity coefficient, computed as the Pearson correlation
    # coefficient of the node degrees
    x = y = degrees.float()
    a, b = M.sum(0), M.sum(1)

    vara = (a * x**2).sum() - ((a * x).sum())**2
    varb = (b * x**2).sum() - ((b * x).sum())**2
    xy = torch.outer(x, y)
    ab = torch.outer(a, b)
    out = (xy * (M - ab)).sum() / (vara * varb).sqrt()
    return out.item()
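For context, a sketch of how the code above maps onto the referenced paper: `M` plays the role of the joint degree distribution :math:`e_{xy}`, `a` and `b` are its marginals, and `vara`/`varb` are the corresponding variances. The returned value is Newman's degree assortativity coefficient:

```latex
% e_{xy}: joint distribution of the degrees at the two ends of an edge,
% a_x, b_y: its marginals, \sigma_a, \sigma_b: their standard deviations.
r = \frac{\sum_{xy} x \, y \, \bigl(e_{xy} - a_x b_y\bigr)}{\sigma_a \sigma_b},
\qquad
\sigma_a^2 = \sum_x x^2 a_x - \Bigl(\sum_x x \, a_x\Bigr)^2 .
```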
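A small usage sketch, not part of the PR, mirroring the docstring example and also exercising the `SparseTensor` branch; it assumes `torch_sparse` is installed.

```python
import torch
from torch_sparse import SparseTensor

from torch_geometric.utils import assortativity

edge_index = torch.tensor([[0, 1, 2, 3, 2],
                           [1, 2, 0, 1, 3]])
print(assortativity(edge_index))  # ~ -0.6667, as in the docstring example

# The same graph as a SparseTensor takes the `edge_index.coo()` code path:
adj = SparseTensor.from_edge_index(edge_index, sparse_sizes=(4, 4))
print(assortativity(adj))  # same value
```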