Skip to content

Commit 72caca4

Browse files
committed
Fix linting issues with ruff
1 parent 9646cbf commit 72caca4

File tree

7 files changed

+61
-33
lines changed

7 files changed

+61
-33
lines changed

_nx_parallel/__init__.py

Lines changed: 15 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -90,15 +90,29 @@ def get_info():
9090
'get_chunks : str, function (default = "chunks")': "A function that takes in a list of all the nodes as input and returns an iterable `node_chunks`. The default chunking is done by slicing the `nodes` into `n_jobs` number of chunks."
9191
},
9292
},
93+
"closeness_centrality": {
94+
"url": "https://github.com/networkx/nx-parallel/blob/main/nx_parallel/algorithms/centrality/closeness.py#L9",
95+
"additional_docs": "The parallel computation is implemented by dividing the nodes into chunks and computing closeness centrality for each chunk concurrently.",
96+
"additional_parameters": {
97+
"G : graph": 'A NetworkX graph u : node, optional Return only the value for node u distance : string or function, optional The edge attribute to use as distance when computing shortest paths, or a user-defined distance function. wf_improved : bool, optional If True, use the improved formula for closeness centrality. get_chunks : str, function (default = "chunks") A function that takes in a list of all the nodes as input and returns an iterable `node_chunks`. The default chunking is done by slicing the `nodes` into `n_jobs` number of chunks.'
98+
},
99+
},
93100
"closeness_vitality": {
94101
"url": "https://github.com/networkx/nx-parallel/blob/main/nx_parallel/algorithms/vitality.py#L10",
95102
"additional_docs": "The parallel computation is implemented only when the node is not specified. The closeness vitality for each node is computed concurrently.",
96103
"additional_parameters": {
97104
'get_chunks : str, function (default = "chunks")': "A function that takes in a list of all the nodes as input and returns an iterable `node_chunks`. The default chunking is done by slicing the `nodes` into `n_jobs` number of chunks."
98105
},
99106
},
107+
"degree_centrality": {
108+
"url": "https://github.com/networkx/nx-parallel/blob/main/nx_parallel/algorithms/centrality/degree.py#L8",
109+
"additional_docs": "Parallel computation of degree centrality. Divides nodes into chunks and computes degree centrality for each chunk concurrently.",
110+
"additional_parameters": {
111+
'get_chunks : str, function (default = "chunks")': "A function that takes in a list of all the nodes as input and returns an iterable `node_chunks`. The default chunking is done by slicing the `nodes` into `n_jobs` number of chunks."
112+
},
113+
},
100114
"edge_betweenness_centrality": {
101-
"url": "https://github.com/networkx/nx-parallel/blob/main/nx_parallel/algorithms/centrality/betweenness.py#L96",
115+
"url": "https://github.com/networkx/nx-parallel/blob/main/nx_parallel/algorithms/centrality/betweenness.py#L104",
102116
"additional_docs": "The parallel computation is implemented by dividing the nodes into chunks and computing edge betweenness centrality for each chunk concurrently.",
103117
"additional_parameters": {
104118
'get_chunks : str, function (default = "chunks")': "A function that takes in a list of all the nodes as input and returns an iterable `node_chunks`. The default chunking is done by slicing the `nodes` into `n_jobs` number of chunks."

nx_parallel/algorithms/centrality/betweenness.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -52,20 +52,20 @@ def betweenness_centrality(
5252
node_chunks = nxp.create_iterables(G, "node", n_jobs, nodes)
5353
else:
5454
node_chunks = get_chunks(nodes)
55-
55+
5656
# Handle empty chunks
57-
if not node_chunks:
57+
if not node_chunks:
5858
return {}
59-
59+
6060
bt_cs = Parallel()(
6161
delayed(_betweenness_centrality_node_subset)(G, chunk, weight, endpoints)
6262
for chunk in node_chunks
6363
)
6464

6565
# Handle empty results
66-
if not bt_cs:
66+
if not bt_cs:
6767
return {}
68-
68+
6969
# Reducing partial solution
7070
bt_c = bt_cs[0]
7171
for bt in bt_cs[1:]:

nx_parallel/algorithms/centrality/closeness.py

Lines changed: 11 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@ def closeness_centrality(
4545
if u is not None:
4646
result = _closeness_centrality_node_subset(G, nodes, distance, wf_improved)
4747
return result[u]
48-
48+
4949
n_jobs = nxp.get_n_jobs()
5050

5151
# Validate get_chunks - the chunk parameter is only used for parallel execution
@@ -66,9 +66,7 @@ def closeness_centrality(
6666
return {}
6767

6868
cc_subs = Parallel()(
69-
delayed(_closeness_centrality_node_subset)(
70-
G, chunk, distance, wf_improved
71-
)
69+
delayed(_closeness_centrality_node_subset)(G, chunk, distance, wf_improved)
7270
for chunk in node_chunks
7371
)
7472

@@ -82,38 +80,38 @@ def closeness_centrality(
8280
def _closeness_centrality_node_subset(G, nodes, distance=None, wf_improved=True):
    """
    Compute closeness centrality for a subset of nodes.

    Implemented to match NetworkX's implementation exactly.
    """
    # NetworkX measures closeness over incoming distances on directed
    # graphs, so work on the reversed view; the caller's graph is untouched.
    if G.is_directed():
        G = G.reverse()  # reversed graph view, no copy of the data

    result = {}
    n_total = len(G)  # invariant across the loop; hoisted once
    for node in nodes:
        # Same path-length routines NetworkX uses: BFS when unweighted,
        # Dijkstra when an edge distance is given.
        if distance is None:
            sp = nx.single_source_shortest_path_length(G, node)
        else:
            sp = nx.single_source_dijkstra_path_length(G, node, weight=distance)

        # Total shortest-path distance from `node` to every reachable node.
        total_distance = sum(sp.values())

        centrality = 0.0
        # NetworkX's exact guard: defined only for connected, non-trivial cases.
        if total_distance > 0.0 and n_total > 1:
            centrality = (len(sp) - 1.0) / total_distance
            if wf_improved:
                # Wasserman-Faust correction: scale by the reachable fraction.
                centrality *= (len(sp) - 1.0) / (n_total - 1)
        result[node] = centrality

    return result

nx_parallel/algorithms/centrality/degree.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
from joblib import Parallel, delayed
22
import nx_parallel as nxp
3-
import networkx as nx
43

54
__all__ = ["degree_centrality"]
65

@@ -61,4 +60,4 @@ def _degree_centrality_node_subset(G, nodes):
6160

6261
for node in nodes:
6362
part_dc[node] = G.degree[node] / (n - 1)
64-
return part_dc
63+
return part_dc

nx_parallel/algorithms/centrality/tests/test_closeness_centrality.py

Lines changed: 15 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
import math
44
import pytest
55

6+
67
def test_betweenness_centrality_get_chunks():
78
def get_chunk(nodes):
89
num_chunks = nxp.get_n_jobs()
@@ -49,11 +50,11 @@ def test_betweenness_centrality_weighted_graph():
4950
"""Test betweenness centrality on a weighted graph."""
5051
G = nx.fast_gnp_random_graph(100, 0.1, directed=False)
5152
for u, v in G.edges:
52-
G[u][v]['weight'] = 1.0 # Assign uniform weights
53+
G[u][v]["weight"] = 1.0 # Assign uniform weights
5354

5455
H = nxp.ParallelGraph(G)
55-
par_bc = nxp.betweenness_centrality(H, weight='weight')
56-
expected_bc = nx.betweenness_centrality(G, weight='weight')
56+
par_bc = nxp.betweenness_centrality(H, weight="weight")
57+
expected_bc = nx.betweenness_centrality(G, weight="weight")
5758

5859
for node in G.nodes:
5960
assert math.isclose(par_bc[node], expected_bc[node], abs_tol=1e-16)
@@ -78,7 +79,9 @@ def test_betweenness_centrality_empty_graph():
7879

7980
# Check if the underlying graph is empty before calling the function
8081
if len(H.graph_object) == 0: # Use the underlying graph's length
81-
assert nxp.betweenness_centrality(H) == {}, "Expected an empty dictionary for an empty graph"
82+
assert (
83+
nxp.betweenness_centrality(H) == {}
84+
), "Expected an empty dictionary for an empty graph"
8285
else:
8386
pytest.fail("Graph is not empty, but it should be.")
8487

@@ -104,7 +107,9 @@ def test_betweenness_centrality_large_graph():
104107
expected_bc = nx.betweenness_centrality(G)
105108

106109
for node in G.nodes:
107-
assert math.isclose(par_bc[node], expected_bc[node], abs_tol=1e-6) # Larger tolerance for large graphs
110+
assert math.isclose(
111+
par_bc[node], expected_bc[node], abs_tol=1e-6
112+
) # Larger tolerance for large graphs
108113

109114

110115
def test_betweenness_centrality_multigraph():
@@ -134,6 +139,7 @@ def test_closeness_centrality_default_chunks():
134139

135140
def test_closeness_centrality_custom_chunks():
136141
"""Test closeness centrality with a custom chunking function."""
142+
137143
def custom_chunking(nodes):
138144
# Example custom chunking: split nodes into two equal parts
139145
mid = len(nodes) // 2
@@ -154,7 +160,9 @@ def test_closeness_centrality_empty_graph():
154160
G = nx.Graph() # An empty graph
155161
H = nxp.ParallelGraph(G)
156162

157-
assert nxp.closeness_centrality(H, get_chunks="chunks") == {}, "Expected an empty dictionary for an empty graph"
163+
assert (
164+
nxp.closeness_centrality(H, get_chunks="chunks") == {}
165+
), "Expected an empty dictionary for an empty graph"
158166

159167

160168
def test_closeness_centrality_single_node():
@@ -178,4 +186,4 @@ def test_closeness_centrality_large_graph():
178186
expected_cc = nx.closeness_centrality(G)
179187

180188
for node in G.nodes:
181-
assert pytest.approx(par_cc[node], rel=1e-6) == expected_cc[node]
189+
assert pytest.approx(par_cc[node], rel=1e-6) == expected_cc[node]

nx_parallel/algorithms/centrality/tests/test_degree_centrality.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@ def test_degree_centrality_default_chunks():
2121

2222
def test_degree_centrality_custom_chunks():
2323
"""Test degree centrality with custom chunking."""
24+
2425
def get_chunk(nodes):
2526
num_chunks = nxp.get_n_jobs()
2627
chunks = [[] for _ in range(num_chunks)]
@@ -132,4 +133,6 @@ def test_degree_centrality_large_graph():
132133
expected_dc = nx.degree_centrality(G)
133134

134135
for node in G.nodes:
135-
assert math.isclose(par_dc[node], expected_dc[node], abs_tol=1e-6) # Larger tolerance for large graphs
136+
assert math.isclose(
137+
par_dc[node], expected_dc[node], abs_tol=1e-6
138+
) # Larger tolerance for large graphs

timing/timing_all_functions.py

Lines changed: 10 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -56,15 +56,21 @@
5656
G = nx.tournament.random_tournament(num)
5757
H = nx_parallel.ParallelGraph(G)
5858
t1 = time.time()
59-
c = nx.tournament.is_reachable(H, 0, num - 1) # Provide source (0) and target (num - 1)
59+
c = nx.tournament.is_reachable(
60+
H, 0, num - 1
61+
) # Provide source (0) and target (num - 1)
6062
t2 = time.time()
6163
parallelTime = t2 - t1
6264
t1 = time.time()
63-
c = nx.tournament.is_reachable(G, 0, num - 1) # Provide source (0) and target (num - 1)
65+
c = nx.tournament.is_reachable(
66+
G, 0, num - 1
67+
) # Provide source (0) and target (num - 1)
6468
t2 = time.time()
6569
stdTime = t2 - t1
6670
timesFaster = stdTime / parallelTime
67-
heatmapDF.at[j, len(function_list)] = timesFaster # Add this as a new row in the heatmap
71+
heatmapDF.at[j, len(function_list)] = (
72+
timesFaster # Add this as a new row in the heatmap
73+
)
6874
print("Finished nx.tournament.is_reachable")
6975

7076
# plotting the heatmap with numbers and a green color scheme
@@ -81,7 +87,7 @@
8187
]
8288

8389
# Ensure the number of labels matches the number of rows in heatmapDF
84-
hm.set_yticklabels(labels[:len(heatmapDF.columns)])
90+
hm.set_yticklabels(labels[: len(heatmapDF.columns)])
8591

8692
# Adding x-axis labels
8793
hm.set_xticklabels(number_of_nodes_list)

0 commit comments

Comments
 (0)