Skip to content

Commit

Permalink
Fixes after code review
Browse files Browse the repository at this point in the history
  • Loading branch information
jkrajniak committed May 26, 2024
1 parent 7ac8232 commit 8fcdb0c
Show file tree
Hide file tree
Showing 3 changed files with 5 additions and 34 deletions.
6 changes: 3 additions & 3 deletions nx_parallel/algorithms/centrality/betweenness.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,14 +97,14 @@ def edge_betweenness_centrality(
"""The parallel computation is implemented by dividing the nodes into chunks and
computing edge betweenness centrality for each chunk concurrently.
networkx.edge_betweenness_centrality : https://networkx.org/documentation/stable/reference/algorithms/generated/networkx.algorithms.centrality.edge_betweenness_centrality.html
Parameters
------------
----------
get_chunks : str, function (default = "chunks")
A function that takes in a list of all the nodes as input and returns an
iterable `node_chunks`. The default chunking is done by slicing the
`nodes` into `n` chunks, where `n` is the number of CPU cores.
networkx.edge_betweenness_centrality : https://networkx.org/documentation/stable/reference/algorithms/generated/networkx.algorithms.centrality.edge_betweenness_centrality.html
"""
if hasattr(G, "graph_object"):
G = G.graph_object
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,14 +26,6 @@ def get_chunk(nodes):
return get_chunk


def get_chunk_edges_random(nodes):
    """Split *nodes* round-robin into one bucket per available CPU core.

    Returns a list of ``nxp.cpu_count()`` lists; element ``i`` of the input
    lands in bucket ``i % cpu_count``, so bucket sizes differ by at most one.
    """
    n_buckets = nxp.cpu_count()
    buckets = [[] for _ in range(n_buckets)]
    for idx, item in enumerate(nodes):
        buckets[idx % n_buckets].append(item)
    return buckets


def test_betweenness_centrality_get_chunks():
G = nx.fast_gnp_random_graph(100, 0.1, directed=False)
H = nxp.ParallelGraph(G)
Expand All @@ -47,17 +39,3 @@ def test_betweenness_centrality_get_chunks():
assert math.isclose(par_bc[i], par_bc_chunk[i], abs_tol=1e-16)
# get_chunk is faster than default(for big graphs)
# G = nx.bipartite.random_graph(400, 700, 0.8, seed=5, directed=False)


def test_edge_betweenness_centrality_get_chunks():
    """Check that a custom chunking function gives the same edge betweenness
    centrality values as the default chunking (smoke test on a random graph)."""
    G = nx.fast_gnp_random_graph(100, 0.1, directed=False)
    H = nxp.ParallelGraph(G)
    # smoke test: run with the round-robin chunker, then with the default
    chunked_bc = nxp.edge_betweenness_centrality(H, get_chunks=get_chunk_edges_random)
    default_bc = nxp.edge_betweenness_centrality(H)

    for edge in G.edges:
        assert math.isclose(default_bc[edge], chunked_bc[edge], abs_tol=1e-16)
    # get_chunk is faster than default (for big graphs)
    # G = nx.bipartite.random_graph(400, 700, 0.8, seed=5, directed=False)
11 changes: 2 additions & 9 deletions timing/timing_all_functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,19 +9,13 @@

# Code to create README heatmap for all functions in function_list
heatmapDF = pd.DataFrame()
function_list = [
nx.betweenness_centrality,
nx.edge_betweenness_centrality,
nx.closeness_vitality,
nx.local_efficiency,
]
function_list = [nx.betweenness_centrality, nx.closeness_vitality, nx.local_efficiency]
number_of_nodes_list = [10, 20, 50, 300, 600]

for i in range(0, len(function_list)):
currFun = function_list[i]
for j in range(0, len(number_of_nodes_list)):
num = number_of_nodes_list[j]
print(f"Starting {currFun} with {num} nodes")

# create original and parallel graphs
G = nx.fast_gnp_random_graph(num, 0.5, directed=False)
Expand All @@ -44,7 +38,7 @@
for j in range(0, len(number_of_nodes_list)):
num = number_of_nodes_list[j]
G = nx.tournament.random_tournament(num)
H = nx_parallel.ParallelGraph(G)
H = nx_parallel.ParallelDiGraph(G)
t1 = time.time()
c = nx.tournament.is_reachable(H, 1, num)
t2 = time.time()
Expand All @@ -64,7 +58,6 @@
hm.set_yticklabels(
[
"betweenness_centrality",
"edge_betweenness_centrality",
"closeness_vitality",
"local_efficiency",
"tournament is_reachable",
Expand Down

0 comments on commit 8fcdb0c

Please sign in to comment.