From 88193c0f566a42482983d1b67df174378d642bda Mon Sep 17 00:00:00 2001 From: Vanshika Mishra <74042272+vanshika230@users.noreply.github.com> Date: Thu, 22 Feb 2024 02:24:20 +0530 Subject: [PATCH 01/47] Improve test coverage for bipartite extendability (#7306) * Improve testcov for bipextendability * change function name * changing fnx name --- networkx/algorithms/bipartite/tests/test_extendability.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/networkx/algorithms/bipartite/tests/test_extendability.py b/networkx/algorithms/bipartite/tests/test_extendability.py index d7ae34e4c1b0..17b7124341bd 100644 --- a/networkx/algorithms/bipartite/tests/test_extendability.py +++ b/networkx/algorithms/bipartite/tests/test_extendability.py @@ -29,6 +29,14 @@ def test_no_perfect_matching_raises(): nx.bipartite.maximal_extendability(G) +def test_residual_graph_not_strongly_connected_raises(): + G = nx.Graph([(1, 2), (2, 3), (3, 4)]) + with pytest.raises( + nx.NetworkXError, match="The residual graph of G is not strongly connected" + ): + nx.bipartite.maximal_extendability(G) + + def test_ladder_graph_is_1(): G = nx.ladder_graph(3) assert nx.bipartite.maximal_extendability(G) == 1 From b7782faa065834b9151caaed63653807fc5ab826 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Wed, 21 Feb 2024 22:46:12 -0600 Subject: [PATCH 02/47] CI: Update scientific-python/upload-nightly-action from 0.3.0 to 0.4.0 (#7309) * Update the scientific-python/upload-nightly-action to v0.4.0 to avoid issues with uploading wheels of the same name to Anaconda Cloud where the project being uploaded to only contains one wheel (the one being replaced). - c.f. 
https://github.com/scientific-python/upload-nightly-action/releases/tag/0.4.0 --- .github/workflows/nightly.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index a8bc0c0575f5..a8010ade75e5 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -27,7 +27,7 @@ jobs: - name: List contents of wheel run: python -m zipfile --list dist/networkx-*.whl - name: Upload nighlty wheel - uses: scientific-python/upload-nightly-action@6e9304f7a3a5501c6f98351537493ec898728299 # 0.3.0 + uses: scientific-python/upload-nightly-action@95f7bf6a22281b8072fae929429dd0408f09ea63 # 0.4.0 with: anaconda_nightly_upload_token: ${{ secrets.ANACONDA_NIGHTLY }} artifacts_path: dist/ From 8cffd1e7004a94be4556121cc3aff442772542d0 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Thu, 22 Feb 2024 11:28:22 -0600 Subject: [PATCH 03/47] CI: Group dependabot updates (#7308) * Group dependabot updates to reduce the number of PRs. - c.f. sp-repo-review GH212: Require GHA update grouping https://learn.scientific-python.org/development/guides/gha-basic/#GH212 --- .github/dependabot.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 76c0f80b78a4..ec68dc5ee50c 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -4,6 +4,10 @@ updates: directory: "/" schedule: interval: "monthly" + groups: + actions: + patterns: + - "*" labels: - "type: Maintenance" - package-ecosystem: "pip" From eec4012d14ec04467f13712b55ea03be53718136 Mon Sep 17 00:00:00 2001 From: Erik Welch Date: Fri, 23 Feb 2024 13:42:39 -0600 Subject: [PATCH 04/47] Transmogrify `_dispatchable` objects into functions (#7298) * Transmogrify `_dispatchable` objects into functions * fix? 
--- networkx/classes/tests/test_backends.py | 14 +++++++++++++- networkx/utils/backends.py | 10 ++++++++++ networkx/utils/decorators.py | 8 +++++++- 3 files changed, 30 insertions(+), 2 deletions(-) diff --git a/networkx/classes/tests/test_backends.py b/networkx/classes/tests/test_backends.py index e1a7c1cf6ffd..cc171cf5be13 100644 --- a/networkx/classes/tests/test_backends.py +++ b/networkx/classes/tests/test_backends.py @@ -17,8 +17,16 @@ def test_dispatch_kwds_vs_args(): def test_pickle(): + count = 0 for name, func in nx.utils.backends._registered_algorithms.items(): - assert pickle.loads(pickle.dumps(func)) is func + try: + # Some functions can't be pickled, but it's not b/c of _dispatchable + pickled = pickle.dumps(func) + except pickle.PicklingError: + continue + assert pickle.loads(pickled) is func + count += 1 + assert count > 0 assert pickle.loads(pickle.dumps(nx.inverse_line_graph)) is nx.inverse_line_graph @@ -74,3 +82,7 @@ def convert_to_nx(obj, *, name=None): del LoopbackDispatcher.from_scipy_sparse_array with pytest.raises(ImportError, match="Unable to load"): nx.from_scipy_sparse_array(A, backend="bad-backend-name") + + +def test_dispatchable_are_functions(): + assert type(nx.pagerank) is type(nx.pagerank.orig_func) diff --git a/networkx/utils/backends.py b/networkx/utils/backends.py index 73cf29469db5..d1491f510782 100644 --- a/networkx/utils/backends.py +++ b/networkx/utils/backends.py @@ -94,10 +94,15 @@ class WrappedSparse: import networkx as nx from ..exception import NetworkXNotImplemented +from .decorators import argmap __all__ = ["_dispatchable"] +def _do_nothing(): + """This does nothing at all, yet it helps turn `_dispatchable` into functions.""" + + def _get_backends(group, *, load_and_call=False): items = entry_points(group=group) rv = {} @@ -348,6 +353,11 @@ def __new__( raise KeyError( f"Algorithm already exists in dispatch registry: {name}" ) from None + # Use the magic of `argmap` to turn `self` into a function. 
This does result + # in small additional overhead compared to calling `_dispatchable` directly, + # but `argmap` has the magical property that it can stack with other `argmap` + # decorators "for free". Being a function is better for REPRs and type-checkers. + self = argmap(_do_nothing)(self) _registered_algorithms[name] = self return self diff --git a/networkx/utils/decorators.py b/networkx/utils/decorators.py index e986434bd43a..ea37a506e199 100644 --- a/networkx/utils/decorators.py +++ b/networkx/utils/decorators.py @@ -1107,7 +1107,13 @@ def signature(cls, f): if prev == param.POSITIONAL_ONLY != kind: # the last token was position-only, but this one isn't def_sig.append("/") - if prev != param.KEYWORD_ONLY == kind != param.VAR_POSITIONAL: + if ( + param.VAR_POSITIONAL + != prev + != param.KEYWORD_ONLY + == kind + != param.VAR_POSITIONAL + ): # param is the first keyword-only arg and isn't starred def_sig.append("*") From 26ccbd7090265b0ad6f89a6edb96c873e90b419c Mon Sep 17 00:00:00 2001 From: Mridul Seth Date: Sat, 24 Feb 2024 11:19:35 +0530 Subject: [PATCH 05/47] CI: update upload-nightly-action to 0.5.0 (#7311) --- .github/workflows/nightly.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index a8010ade75e5..ee854c0045fd 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -27,7 +27,7 @@ jobs: - name: List contents of wheel run: python -m zipfile --list dist/networkx-*.whl - name: Upload nighlty wheel - uses: scientific-python/upload-nightly-action@95f7bf6a22281b8072fae929429dd0408f09ea63 # 0.4.0 + uses: scientific-python/upload-nightly-action@b67d7fcc0396e1128a474d1ab2b48aa94680f9fc # 0.5.0 with: anaconda_nightly_upload_token: ${{ secrets.ANACONDA_NIGHTLY }} artifacts_path: dist/ From de85e3fe52879f819e7a7924474fc6be3994e8e4 Mon Sep 17 00:00:00 2001 From: Aditi Juneja <91629733+Schefflera-Arboricola@users.noreply.github.com> Date: Sun, 25 Feb 2024 
07:25:18 +0530 Subject: [PATCH 06/47] renaming backend `func_info` dictionary's keys (#7219) * renamed extra_parameters to additional_parameters * renamed extra_docstring to backend_func_docs * added backend_func_examples and backend_func_url * added example heading * small edit * style fix * bug fix * updated backends.py * style fix * code refactoring * style fix * Update backends.py * updated lines * rm backend_func_examples * using walrus operator instead of 'if' by @eriknw Co-authored-by: Erik Welch * used walrus operator * style fix * used walrus opr * style fix * style fix * Updated key names * Updated comment --------- Co-authored-by: Erik Welch --- networkx/utils/backends.py | 24 +++++++++++++++++------- 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/networkx/utils/backends.py b/networkx/utils/backends.py index d1491f510782..8dfb82496de3 100644 --- a/networkx/utils/backends.py +++ b/networkx/utils/backends.py @@ -86,7 +86,6 @@ class WrappedSparse: """ import inspect import os -import sys import warnings from functools import partial from importlib.metadata import entry_points @@ -972,19 +971,26 @@ def _make_doc(self): continue func_info = info["functions"][self.name] - if "extra_docstring" in func_info: + + # Renaming extra_docstring to additional_docs + if func_docs := ( + func_info.get("additional_docs") or func_info.get("extra_docstring") + ): lines.extend( - f" {line}" if line else line - for line in func_info["extra_docstring"].split("\n") + f" {line}" if line else line for line in func_docs.split("\n") ) add_gap = True else: add_gap = False - if "extra_parameters" in func_info: + + # Renaming extra_parameters to additional_parameters + if extra_parameters := ( + func_info.get("extra_parameters") + or func_info.get("additional_parameters") + ): if add_gap: lines.append("") - lines.append(" Extra parameters:") - extra_parameters = func_info["extra_parameters"] + lines.append(" Additional parameters:") for param in 
sorted(extra_parameters): lines.append(f" {param}") if desc := extra_parameters[param]: @@ -993,6 +999,10 @@ def _make_doc(self): else: lines.append("") + if func_url := func_info.get("url"): + lines.append(f"[`Source <{func_url}>`_]") + lines.append("") + lines.pop() # Remove last empty line to_add = "\n ".join(lines) return f"{self._orig_doc.rstrip()}\n\n {to_add}" From 5b9aec5bae22608e6879bebdaa1be6a59315e95b Mon Sep 17 00:00:00 2001 From: Erik Welch Date: Wed, 28 Feb 2024 08:49:54 -0600 Subject: [PATCH 07/47] Add `mutates_input=` and `returns_graph=` to `_dispatchable` (#7191) * Add `mutates_input=True` to `_dispatch` * Add `returns_graph=True` to `_dispatchable` * Don't auto-convert for functions that return Graphs; also, updates * more comments * Make `returns_graph` attribute private (for now) --- .../algorithms/approximation/steinertree.py | 4 +- .../approximation/traveling_salesman.py | 2 +- .../algorithms/approximation/treewidth.py | 6 +- networkx/algorithms/bipartite/edgelist.py | 4 +- networkx/algorithms/bipartite/generators.py | 16 +-- networkx/algorithms/bipartite/matrix.py | 2 +- networkx/algorithms/bipartite/projection.py | 12 +- networkx/algorithms/centrality/closeness.py | 2 +- networkx/algorithms/chordal.py | 2 +- networkx/algorithms/clique.py | 4 +- .../algorithms/coloring/equitable_coloring.py | 2 +- .../components/strongly_connected.py | 2 +- .../connectivity/edge_augmentation.py | 2 +- .../connectivity/edge_kcomponents.py | 2 +- networkx/algorithms/connectivity/utils.py | 4 +- networkx/algorithms/core.py | 10 +- networkx/algorithms/dag.py | 8 +- networkx/algorithms/distance_measures.py | 2 +- networkx/algorithms/euler.py | 2 +- networkx/algorithms/flow/boykovkolmogorov.py | 1 + networkx/algorithms/flow/dinitz_alg.py | 1 + networkx/algorithms/flow/edmondskarp.py | 2 + networkx/algorithms/flow/gomory_hu.py | 2 +- networkx/algorithms/flow/preflowpush.py | 1 + .../algorithms/flow/shortestaugmentingpath.py | 1 + networkx/algorithms/flow/utils.py | 
2 +- networkx/algorithms/hybrid.py | 2 +- .../isomorphism/tree_isomorphism.py | 2 +- networkx/algorithms/minors/contraction.py | 10 +- networkx/algorithms/moral.py | 2 +- networkx/algorithms/operators/all.py | 8 +- networkx/algorithms/operators/binary.py | 14 +-- networkx/algorithms/operators/product.py | 18 +-- networkx/algorithms/operators/unary.py | 4 +- networkx/algorithms/planarity.py | 8 +- networkx/algorithms/regular.py | 2 +- networkx/algorithms/smallworld.py | 4 +- networkx/algorithms/sparsifiers.py | 2 +- networkx/algorithms/summarization.py | 6 +- networkx/algorithms/swap.py | 6 +- networkx/algorithms/threshold.py | 4 +- networkx/algorithms/tournament.py | 2 +- .../traversal/breadth_first_search.py | 2 +- .../traversal/depth_first_search.py | 2 +- networkx/algorithms/tree/branchings.py | 7 +- networkx/algorithms/tree/coding.py | 4 +- networkx/algorithms/tree/decomposition.py | 2 +- networkx/algorithms/tree/mst.py | 8 +- networkx/algorithms/tree/operations.py | 2 +- networkx/algorithms/triads.py | 4 +- networkx/convert.py | 6 +- networkx/convert_matrix.py | 8 +- networkx/drawing/nx_agraph.py | 4 +- networkx/drawing/nx_pydot.py | 4 +- networkx/generators/atlas.py | 4 +- networkx/generators/classic.py | 42 +++---- networkx/generators/cographs.py | 2 +- networkx/generators/community.py | 20 ++-- networkx/generators/degree_seq.py | 14 +-- networkx/generators/directed.py | 12 +- networkx/generators/duplication.py | 4 +- networkx/generators/ego.py | 2 +- networkx/generators/expanders.py | 10 +- networkx/generators/geometric.py | 14 +-- networkx/generators/harary_graph.py | 4 +- networkx/generators/internet_as_graphs.py | 2 +- networkx/generators/intersection.py | 6 +- networkx/generators/interval_graph.py | 2 +- networkx/generators/joint_degree_seq.py | 4 +- networkx/generators/lattice.py | 10 +- networkx/generators/line.py | 4 +- networkx/generators/mycielski.py | 4 +- networkx/generators/nonisomorphic_trees.py | 2 +- networkx/generators/random_clustered.py | 
2 +- networkx/generators/random_graphs.py | 32 ++--- networkx/generators/small.py | 46 +++---- networkx/generators/social.py | 8 +- networkx/generators/spectral_graph_forge.py | 2 +- networkx/generators/stochastic.py | 4 +- networkx/generators/sudoku.py | 2 +- networkx/generators/time_series.py | 2 +- networkx/generators/trees.py | 18 +-- networkx/generators/triads.py | 2 +- networkx/readwrite/adjlist.py | 4 +- networkx/readwrite/edgelist.py | 6 +- networkx/readwrite/gexf.py | 2 +- networkx/readwrite/gml.py | 4 +- networkx/readwrite/graph6.py | 4 +- networkx/readwrite/graphml.py | 4 +- networkx/readwrite/json_graph/adjacency.py | 2 +- networkx/readwrite/json_graph/cytoscape.py | 2 +- networkx/readwrite/json_graph/node_link.py | 2 +- networkx/readwrite/json_graph/tree.py | 2 +- networkx/readwrite/leda.py | 4 +- networkx/readwrite/multiline_adjlist.py | 4 +- networkx/readwrite/p2g.py | 4 +- networkx/readwrite/pajek.py | 4 +- networkx/readwrite/sparse6.py | 4 +- networkx/relabel.py | 8 +- networkx/utils/backends.py | 113 ++++++++++++++++-- 100 files changed, 405 insertions(+), 293 deletions(-) diff --git a/networkx/algorithms/approximation/steinertree.py b/networkx/algorithms/approximation/steinertree.py index 7751251ddf8d..af5916442b92 100644 --- a/networkx/algorithms/approximation/steinertree.py +++ b/networkx/algorithms/approximation/steinertree.py @@ -7,7 +7,7 @@ @not_implemented_for("directed") -@nx._dispatchable(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight", returns_graph=True) def metric_closure(G, weight="weight"): """Return the metric closure of a graph. @@ -126,7 +126,7 @@ def _remove_nonterminal_leaves(G, terminals): @not_implemented_for("directed") -@nx._dispatchable(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight", returns_graph=True) def steiner_tree(G, terminal_nodes, weight="weight", method=None): r"""Return an approximation to the minimum Steiner tree of a graph. 
diff --git a/networkx/algorithms/approximation/traveling_salesman.py b/networkx/algorithms/approximation/traveling_salesman.py index f2127e3066cd..f3fae97e8ef0 100644 --- a/networkx/algorithms/approximation/traveling_salesman.py +++ b/networkx/algorithms/approximation/traveling_salesman.py @@ -492,7 +492,7 @@ def asadpour_atsp(G, weight="weight", seed=None, source=None): return _shortcutting(circuit) -@nx._dispatchable(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight", returns_graph=True) def held_karp_ascent(G, weight="weight"): """ Minimizes the Held-Karp relaxation of the TSP for `G` diff --git a/networkx/algorithms/approximation/treewidth.py b/networkx/algorithms/approximation/treewidth.py index c7c01a1cd4d3..31d73f636823 100644 --- a/networkx/algorithms/approximation/treewidth.py +++ b/networkx/algorithms/approximation/treewidth.py @@ -41,7 +41,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def treewidth_min_degree(G): """Returns a treewidth decomposition using the Minimum Degree heuristic. @@ -65,7 +65,7 @@ def treewidth_min_degree(G): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def treewidth_min_fill_in(G): """Returns a treewidth decomposition using the Minimum Fill-in heuristic. @@ -177,7 +177,7 @@ def min_fill_in_heuristic(graph): return min_fill_in_node -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def treewidth_decomp(G, heuristic=min_fill_in_heuristic): """Returns a treewidth decomposition using the passed heuristic. 
diff --git a/networkx/algorithms/bipartite/edgelist.py b/networkx/algorithms/bipartite/edgelist.py index b9b2a82e05a8..70631ea0e099 100644 --- a/networkx/algorithms/bipartite/edgelist.py +++ b/networkx/algorithms/bipartite/edgelist.py @@ -146,7 +146,7 @@ def generate_edgelist(G, delimiter=" ", data=True): yield delimiter.join(map(str, edge)) -@nx._dispatchable(name="bipartite_parse_edgelist", graphs=None) +@nx._dispatchable(name="bipartite_parse_edgelist", graphs=None, returns_graph=True) def parse_edgelist( lines, comments="#", delimiter=None, create_using=None, nodetype=None, data=True ): @@ -268,7 +268,7 @@ def parse_edgelist( @open_file(0, mode="rb") -@nx._dispatchable(name="bipartite_read_edgelist", graphs=None) +@nx._dispatchable(name="bipartite_read_edgelist", graphs=None, returns_graph=True) def read_edgelist( path, comments="#", diff --git a/networkx/algorithms/bipartite/generators.py b/networkx/algorithms/bipartite/generators.py index 2774c6fd4093..de6f07972394 100644 --- a/networkx/algorithms/bipartite/generators.py +++ b/networkx/algorithms/bipartite/generators.py @@ -20,7 +20,7 @@ ] -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) @nodes_or_number([0, 1]) def complete_bipartite_graph(n1, n2, create_using=None): """Returns the complete bipartite graph `K_{n_1,n_2}`. @@ -67,7 +67,7 @@ def complete_bipartite_graph(n1, n2, create_using=None): @py_random_state(3) -@nx._dispatchable(name="bipartite_configuration_model", graphs=None) +@nx._dispatchable(name="bipartite_configuration_model", graphs=None, returns_graph=True) def configuration_model(aseq, bseq, create_using=None, seed=None): """Returns a random bipartite graph from two given degree sequences. 
@@ -138,7 +138,7 @@ def configuration_model(aseq, bseq, create_using=None, seed=None): return G -@nx._dispatchable(name="bipartite_havel_hakimi_graph", graphs=None) +@nx._dispatchable(name="bipartite_havel_hakimi_graph", graphs=None, returns_graph=True) def havel_hakimi_graph(aseq, bseq, create_using=None): """Returns a bipartite graph from two given degree sequences using a Havel-Hakimi style construction. @@ -213,7 +213,7 @@ def havel_hakimi_graph(aseq, bseq, create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def reverse_havel_hakimi_graph(aseq, bseq, create_using=None): """Returns a bipartite graph from two given degree sequences using a Havel-Hakimi style construction. @@ -287,7 +287,7 @@ def reverse_havel_hakimi_graph(aseq, bseq, create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def alternating_havel_hakimi_graph(aseq, bseq, create_using=None): """Returns a bipartite graph from two given degree sequences using an alternating Havel-Hakimi style construction. @@ -366,7 +366,7 @@ def alternating_havel_hakimi_graph(aseq, bseq, create_using=None): @py_random_state(3) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def preferential_attachment_graph(aseq, p, create_using=None, seed=None): """Create a bipartite graph with a preferential attachment model from a given single degree sequence. @@ -438,7 +438,7 @@ def preferential_attachment_graph(aseq, p, create_using=None, seed=None): @py_random_state(3) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_graph(n, m, p, seed=None, directed=False): """Returns a bipartite random graph. 
@@ -525,7 +525,7 @@ def random_graph(n, m, p, seed=None, directed=False): @py_random_state(3) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def gnmk_random_graph(n, m, k, seed=None, directed=False): """Returns a random bipartite graph G_{n,m,k}. diff --git a/networkx/algorithms/bipartite/matrix.py b/networkx/algorithms/bipartite/matrix.py index 8809e21b62e1..462ef8a1311c 100644 --- a/networkx/algorithms/bipartite/matrix.py +++ b/networkx/algorithms/bipartite/matrix.py @@ -110,7 +110,7 @@ def biadjacency_matrix( raise nx.NetworkXError(f"Unknown sparse array format: {format}") from err -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_biadjacency_matrix(A, create_using=None, edge_attribute="weight"): r"""Creates a new bipartite graph from a biadjacency matrix given as a SciPy sparse array. diff --git a/networkx/algorithms/bipartite/projection.py b/networkx/algorithms/bipartite/projection.py index 55544c4acbc4..1eb71fa528f1 100644 --- a/networkx/algorithms/bipartite/projection.py +++ b/networkx/algorithms/bipartite/projection.py @@ -12,7 +12,9 @@ ] -@nx._dispatchable(graphs="B", preserve_node_attrs=True, preserve_graph_attrs=True) +@nx._dispatchable( + graphs="B", preserve_node_attrs=True, preserve_graph_attrs=True, returns_graph=True +) def projected_graph(B, nodes, multigraph=False): r"""Returns the projection of B onto one of its node sets. @@ -117,7 +119,7 @@ def projected_graph(B, nodes, multigraph=False): @not_implemented_for("multigraph") -@nx._dispatchable(graphs="B") +@nx._dispatchable(graphs="B", returns_graph=True) def weighted_projected_graph(B, nodes, ratio=False): r"""Returns a weighted projection of B onto one of its node sets. 
@@ -218,7 +220,7 @@ def weighted_projected_graph(B, nodes, ratio=False): @not_implemented_for("multigraph") -@nx._dispatchable(graphs="B") +@nx._dispatchable(graphs="B", returns_graph=True) def collaboration_weighted_projected_graph(B, nodes): r"""Newman's weighted projection of B onto one of its node sets. @@ -312,7 +314,7 @@ def collaboration_weighted_projected_graph(B, nodes): @not_implemented_for("multigraph") -@nx._dispatchable(graphs="B") +@nx._dispatchable(graphs="B", returns_graph=True) def overlap_weighted_projected_graph(B, nodes, jaccard=True): r"""Overlap weighted projection of B onto one of its node sets. @@ -412,7 +414,7 @@ def overlap_weighted_projected_graph(B, nodes, jaccard=True): @not_implemented_for("multigraph") -@nx._dispatchable(graphs="B", preserve_all_attrs=True) +@nx._dispatchable(graphs="B", preserve_all_attrs=True, returns_graph=True) def generic_weighted_projected_graph(B, nodes, weight_function=None): r"""Weighted projection of B with a user-specified weight function. 
diff --git a/networkx/algorithms/centrality/closeness.py b/networkx/algorithms/centrality/closeness.py index 3527c468f33e..1c1722d4ed4c 100644 --- a/networkx/algorithms/centrality/closeness.py +++ b/networkx/algorithms/centrality/closeness.py @@ -137,7 +137,7 @@ def closeness_centrality(G, u=None, distance=None, wf_improved=True): @not_implemented_for("directed") -@nx._dispatchable +@nx._dispatchable(mutates_input=True) def incremental_closeness_centrality( G, edge, prev_cc=None, insertion=True, wf_improved=True ): diff --git a/networkx/algorithms/chordal.py b/networkx/algorithms/chordal.py index 875bf24d14e8..6bd3ccd2ea3e 100644 --- a/networkx/algorithms/chordal.py +++ b/networkx/algorithms/chordal.py @@ -369,7 +369,7 @@ def _find_chordality_breaker(G, s=None, treewidth_bound=sys.maxsize): @not_implemented_for("directed") -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def complete_to_chordal_graph(G): """Return a copy of G completed to a chordal graph diff --git a/networkx/algorithms/clique.py b/networkx/algorithms/clique.py index 0b4d156e9003..5f959dd46582 100644 --- a/networkx/algorithms/clique.py +++ b/networkx/algorithms/clique.py @@ -412,7 +412,7 @@ def expand(subg, cand): return expand(subg_init, cand_init) -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def make_max_clique_graph(G, create_using=None): """Returns the maximal clique graph of the given graph. @@ -460,7 +460,7 @@ def make_max_clique_graph(G, create_using=None): return B -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def make_clique_bipartite(G, fpos=None, create_using=None, name=None): """Returns the bipartite clique graph corresponding to `G`. 
diff --git a/networkx/algorithms/coloring/equitable_coloring.py b/networkx/algorithms/coloring/equitable_coloring.py index 659609664c12..e464a0744704 100644 --- a/networkx/algorithms/coloring/equitable_coloring.py +++ b/networkx/algorithms/coloring/equitable_coloring.py @@ -112,7 +112,7 @@ def move_witnesses(src_color, dst_color, N, H, F, C, T_cal, L): X = Y -@nx._dispatchable +@nx._dispatchable(mutates_input=True) def pad_graph(G, num_colors): """Add a disconnected complete clique K_p such that the number of nodes in the graph becomes a multiple of `num_colors`. diff --git a/networkx/algorithms/components/strongly_connected.py b/networkx/algorithms/components/strongly_connected.py index 5f89cb1278c7..febd1b9b5410 100644 --- a/networkx/algorithms/components/strongly_connected.py +++ b/networkx/algorithms/components/strongly_connected.py @@ -346,7 +346,7 @@ def is_strongly_connected(G): @not_implemented_for("undirected") -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def condensation(G, scc=None): """Returns the condensation of G. diff --git a/networkx/algorithms/connectivity/edge_augmentation.py b/networkx/algorithms/connectivity/edge_augmentation.py index 0e77a0bf683e..d095ed51917d 100644 --- a/networkx/algorithms/connectivity/edge_augmentation.py +++ b/networkx/algorithms/connectivity/edge_augmentation.py @@ -1040,7 +1040,7 @@ def _minimum_rooted_branching(D, root): return A -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def collapse(G, grouped_nodes): """Collapses each group of nodes into a single node. 
diff --git a/networkx/algorithms/connectivity/edge_kcomponents.py b/networkx/algorithms/connectivity/edge_kcomponents.py index 9dcdf71a2e52..e071f4d3df81 100644 --- a/networkx/algorithms/connectivity/edge_kcomponents.py +++ b/networkx/algorithms/connectivity/edge_kcomponents.py @@ -503,7 +503,7 @@ def _high_degree_components(G, k): yield from nx.connected_components(H) -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def general_k_edge_subgraphs(G, k): """General algorithm to find all maximal k-edge-connected subgraphs in `G`. diff --git a/networkx/algorithms/connectivity/utils.py b/networkx/algorithms/connectivity/utils.py index cd3dadf61da6..a4d822ae5232 100644 --- a/networkx/algorithms/connectivity/utils.py +++ b/networkx/algorithms/connectivity/utils.py @@ -6,7 +6,7 @@ __all__ = ["build_auxiliary_node_connectivity", "build_auxiliary_edge_connectivity"] -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def build_auxiliary_node_connectivity(G): r"""Creates a directed graph D from an undirected graph G to compute flow based node connectivity. @@ -59,7 +59,7 @@ def build_auxiliary_node_connectivity(G): return H -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def build_auxiliary_edge_connectivity(G): """Auxiliary digraph for computing flow based edge connectivity diff --git a/networkx/algorithms/core.py b/networkx/algorithms/core.py index 7ab7598260c0..511e6d9d0bae 100644 --- a/networkx/algorithms/core.py +++ b/networkx/algorithms/core.py @@ -148,7 +148,7 @@ def _core_subgraph(G, k_filter, k=None, core=None): return G.subgraph(nodes).copy() -@nx._dispatchable(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def k_core(G, k=None, core_number=None): """Returns the k-core of G. 
@@ -224,7 +224,7 @@ def k_filter(v, k, c): return _core_subgraph(G, k_filter, k, core_number) -@nx._dispatchable(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def k_shell(G, k=None, core_number=None): """Returns the k-shell of G. @@ -306,7 +306,7 @@ def k_filter(v, k, c): return _core_subgraph(G, k_filter, k, core_number) -@nx._dispatchable(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def k_crust(G, k=None, core_number=None): """Returns the k-crust of G. @@ -389,7 +389,7 @@ def k_crust(G, k=None, core_number=None): return G.subgraph(nodes).copy() -@nx._dispatchable(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def k_corona(G, k, core_number=None): """Returns the k-corona of G. @@ -468,7 +468,7 @@ def func(v, k, c): @nx.utils.not_implemented_for("directed") @nx.utils.not_implemented_for("multigraph") -@nx._dispatchable(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def k_truss(G, k): """Returns the k-truss of `G`. diff --git a/networkx/algorithms/dag.py b/networkx/algorithms/dag.py index 04c76424feb1..a70e2c5efafe 100644 --- a/networkx/algorithms/dag.py +++ b/networkx/algorithms/dag.py @@ -665,7 +665,7 @@ def is_aperiodic(G): return g == 1 and nx.is_aperiodic(G.subgraph(set(G) - set(levels))) -@nx._dispatchable(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def transitive_closure(G, reflexive=False): """Returns transitive closure of a graph @@ -758,7 +758,7 @@ def transitive_closure(G, reflexive=False): @not_implemented_for("undirected") -@nx._dispatchable(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def transitive_closure_dag(G, topo_order=None): """Returns the transitive closure of a directed acyclic graph. 
@@ -815,7 +815,7 @@ def transitive_closure_dag(G, topo_order=None): @not_implemented_for("undirected") -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def transitive_reduction(G): """Returns transitive reduction of a directed graph @@ -1124,7 +1124,7 @@ def root_to_leaf_paths(G): @not_implemented_for("multigraph") @not_implemented_for("undirected") -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def dag_to_branching(G): """Returns a branching representing all (overlapping) paths from root nodes to leaf nodes in the given directed acyclic graph. diff --git a/networkx/algorithms/distance_measures.py b/networkx/algorithms/distance_measures.py index 423d9b6dee55..310c3913f01a 100644 --- a/networkx/algorithms/distance_measures.py +++ b/networkx/algorithms/distance_measures.py @@ -553,7 +553,7 @@ def center(G, e=None, usebounds=False, weight=None): return p -@nx._dispatchable(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight", mutates_input={"attr": 2}) def barycenter(G, weight=None, attr=None, sp=None): r"""Calculate barycenter of a connected graph, optionally with edge weights. diff --git a/networkx/algorithms/euler.py b/networkx/algorithms/euler.py index 54ab9bcff097..2e0e57358aa9 100644 --- a/networkx/algorithms/euler.py +++ b/networkx/algorithms/euler.py @@ -386,7 +386,7 @@ def eulerian_path(G, source=None, keys=False): @not_implemented_for("directed") -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def eulerize(G): """Transforms a graph into an Eulerian graph. 
diff --git a/networkx/algorithms/flow/boykovkolmogorov.py b/networkx/algorithms/flow/boykovkolmogorov.py index c75bbba69cac..e1c9486f5276 100644 --- a/networkx/algorithms/flow/boykovkolmogorov.py +++ b/networkx/algorithms/flow/boykovkolmogorov.py @@ -15,6 +15,7 @@ edge_attrs={"capacity": float("inf")}, preserve_edge_attrs={"residual": {"capacity": float("inf")}}, preserve_graph_attrs={"residual"}, + returns_graph=True, ) def boykov_kolmogorov( G, s, t, capacity="capacity", residual=None, value_only=False, cutoff=None diff --git a/networkx/algorithms/flow/dinitz_alg.py b/networkx/algorithms/flow/dinitz_alg.py index b40fd78351fb..31c1a5e2a1cb 100644 --- a/networkx/algorithms/flow/dinitz_alg.py +++ b/networkx/algorithms/flow/dinitz_alg.py @@ -15,6 +15,7 @@ edge_attrs={"capacity": float("inf")}, preserve_edge_attrs={"residual": {"capacity": float("inf")}}, preserve_graph_attrs={"residual"}, + returns_graph=True, ) def dinitz(G, s, t, capacity="capacity", residual=None, value_only=False, cutoff=None): """Find a maximum single-commodity flow using Dinitz' algorithm. 
diff --git a/networkx/algorithms/flow/edmondskarp.py b/networkx/algorithms/flow/edmondskarp.py index 6fa76d7cabc2..92d79f181f20 100644 --- a/networkx/algorithms/flow/edmondskarp.py +++ b/networkx/algorithms/flow/edmondskarp.py @@ -12,6 +12,7 @@ graphs="R", preserve_edge_attrs={"R": {"capacity": float("inf"), "flow": 0}}, preserve_graph_attrs=True, + mutates_input=True, ) def edmonds_karp_core(R, s, t, cutoff): """Implementation of the Edmonds-Karp algorithm.""" @@ -127,6 +128,7 @@ def edmonds_karp_impl(G, s, t, capacity, residual, cutoff): edge_attrs={"capacity": float("inf")}, preserve_edge_attrs={"residual": {"capacity": float("inf")}}, preserve_graph_attrs={"residual"}, + returns_graph=True, ) def edmonds_karp( G, s, t, capacity="capacity", residual=None, value_only=False, cutoff=None diff --git a/networkx/algorithms/flow/gomory_hu.py b/networkx/algorithms/flow/gomory_hu.py index 802c9621d805..951abaeb5178 100644 --- a/networkx/algorithms/flow/gomory_hu.py +++ b/networkx/algorithms/flow/gomory_hu.py @@ -13,7 +13,7 @@ @not_implemented_for("directed") -@nx._dispatchable(edge_attrs={"capacity": float("inf")}) +@nx._dispatchable(edge_attrs={"capacity": float("inf")}, returns_graph=True) def gomory_hu_tree(G, capacity="capacity", flow_func=None): r"""Returns the Gomory-Hu tree of an undirected graph G. 
diff --git a/networkx/algorithms/flow/preflowpush.py b/networkx/algorithms/flow/preflowpush.py index 50f6c8503a70..5afa548060c8 100644 --- a/networkx/algorithms/flow/preflowpush.py +++ b/networkx/algorithms/flow/preflowpush.py @@ -293,6 +293,7 @@ def global_relabel(from_sink): edge_attrs={"capacity": float("inf")}, preserve_edge_attrs={"residual": {"capacity": float("inf")}}, preserve_graph_attrs={"residual"}, + returns_graph=True, ) def preflow_push( G, s, t, capacity="capacity", residual=None, global_relabel_freq=1, value_only=False diff --git a/networkx/algorithms/flow/shortestaugmentingpath.py b/networkx/algorithms/flow/shortestaugmentingpath.py index ba50f7acb0c8..c2583d16646a 100644 --- a/networkx/algorithms/flow/shortestaugmentingpath.py +++ b/networkx/algorithms/flow/shortestaugmentingpath.py @@ -168,6 +168,7 @@ def relabel(u): edge_attrs={"capacity": float("inf")}, preserve_edge_attrs={"residual": {"capacity": float("inf")}}, preserve_graph_attrs={"residual"}, + returns_graph=True, ) def shortest_augmenting_path( G, diff --git a/networkx/algorithms/flow/utils.py b/networkx/algorithms/flow/utils.py index 20ee78e0ae6d..dcb663f3b644 100644 --- a/networkx/algorithms/flow/utils.py +++ b/networkx/algorithms/flow/utils.py @@ -72,7 +72,7 @@ def clear_work(self): self._work = 0 -@nx._dispatchable(edge_attrs={"capacity": float("inf")}) +@nx._dispatchable(edge_attrs={"capacity": float("inf")}, returns_graph=True) def build_residual_network(G, capacity): """Build a residual network and initialize a zero flow. diff --git a/networkx/algorithms/hybrid.py b/networkx/algorithms/hybrid.py index c98774e002e7..4d0e5c36e801 100644 --- a/networkx/algorithms/hybrid.py +++ b/networkx/algorithms/hybrid.py @@ -10,7 +10,7 @@ __all__ = ["kl_connected_subgraph", "is_kl_connected"] -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def kl_connected_subgraph(G, k, l, low_memory=False, same_as_graph=False): """Returns the maximum locally `(k, l)`-connected subgraph of `G`. 
diff --git a/networkx/algorithms/isomorphism/tree_isomorphism.py b/networkx/algorithms/isomorphism/tree_isomorphism.py index 71a5890a116e..e409d515f1ce 100644 --- a/networkx/algorithms/isomorphism/tree_isomorphism.py +++ b/networkx/algorithms/isomorphism/tree_isomorphism.py @@ -24,7 +24,7 @@ __all__ = ["rooted_tree_isomorphism", "tree_isomorphism"] -@nx._dispatchable(graphs={"t1": 0, "t2": 2}) +@nx._dispatchable(graphs={"t1": 0, "t2": 2}, returns_graph=True) def root_trees(t1, root1, t2, root2): """Create a single digraph dT of free trees t1 and t2 # with roots root1 and root2 respectively diff --git a/networkx/algorithms/minors/contraction.py b/networkx/algorithms/minors/contraction.py index ed29a7ac7c56..9f4d89fa8029 100644 --- a/networkx/algorithms/minors/contraction.py +++ b/networkx/algorithms/minors/contraction.py @@ -95,7 +95,7 @@ def equivalence_classes(iterable, relation): return {frozenset(block) for block in blocks} -@nx._dispatchable(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight", returns_graph=True) def quotient_graph( G, partition, @@ -424,7 +424,9 @@ def edge_data(b, c): return H -@nx._dispatchable(preserve_all_attrs=True) +@nx._dispatchable( + preserve_all_attrs=True, mutates_input={"not copy": 4}, returns_graph=True +) def contracted_nodes(G, u, v, self_loops=True, copy=True): """Returns the graph that results from contracting `u` and `v`. @@ -559,7 +561,9 @@ def contracted_nodes(G, u, v, self_loops=True, copy=True): identified_nodes = contracted_nodes -@nx._dispatchable(preserve_edge_attrs=True) +@nx._dispatchable( + preserve_edge_attrs=True, mutates_input={"not copy": 3}, returns_graph=True +) def contracted_edge(G, edge, self_loops=True, copy=True): """Returns the graph that results from contracting the specified edge. 
diff --git a/networkx/algorithms/moral.py b/networkx/algorithms/moral.py index 8532f2b261f1..e2acf80f6c37 100644 --- a/networkx/algorithms/moral.py +++ b/networkx/algorithms/moral.py @@ -9,7 +9,7 @@ @not_implemented_for("undirected") -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def moral_graph(G): r"""Return the Moral Graph diff --git a/networkx/algorithms/operators/all.py b/networkx/algorithms/operators/all.py index e6f4d60b040a..ba1304b6c4f6 100644 --- a/networkx/algorithms/operators/all.py +++ b/networkx/algorithms/operators/all.py @@ -7,7 +7,7 @@ __all__ = ["union_all", "compose_all", "disjoint_union_all", "intersection_all"] -@nx._dispatchable(graphs="[graphs]", preserve_all_attrs=True) +@nx._dispatchable(graphs="[graphs]", preserve_all_attrs=True, returns_graph=True) def union_all(graphs, rename=()): """Returns the union of all graphs. @@ -110,7 +110,7 @@ def label(x): return R -@nx._dispatchable(graphs="[graphs]", preserve_all_attrs=True) +@nx._dispatchable(graphs="[graphs]", preserve_all_attrs=True, returns_graph=True) def disjoint_union_all(graphs): """Returns the disjoint union of all graphs. @@ -164,7 +164,7 @@ def yield_relabeled(graphs): return R -@nx._dispatchable(graphs="[graphs]", preserve_all_attrs=True) +@nx._dispatchable(graphs="[graphs]", preserve_all_attrs=True, returns_graph=True) def compose_all(graphs): """Returns the composition of all graphs. @@ -230,7 +230,7 @@ def compose_all(graphs): return R -@nx._dispatchable(graphs="[graphs]") +@nx._dispatchable(graphs="[graphs]", returns_graph=True) def intersection_all(graphs): """Returns a new graph that contains only the nodes and the edges that exist in all graphs. 
diff --git a/networkx/algorithms/operators/binary.py b/networkx/algorithms/operators/binary.py index 2b69aaf084da..0ca3a7b6bd2c 100644 --- a/networkx/algorithms/operators/binary.py +++ b/networkx/algorithms/operators/binary.py @@ -15,7 +15,7 @@ _G_H = {"G": 0, "H": 1} -@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True) +@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True, returns_graph=True) def union(G, H, rename=()): """Combine graphs G and H. The names of nodes must be unique. @@ -71,7 +71,7 @@ def union(G, H, rename=()): return nx.union_all([G, H], rename) -@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True) +@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True, returns_graph=True) def disjoint_union(G, H): """Combine graphs G and H. The nodes are assumed to be unique (disjoint). @@ -125,7 +125,7 @@ def disjoint_union(G, H): return nx.disjoint_union_all([G, H]) -@nx._dispatchable(graphs=_G_H) +@nx._dispatchable(graphs=_G_H, returns_graph=True) def intersection(G, H): """Returns a new graph that contains only the nodes and the edges that exist in both G and H. @@ -170,7 +170,7 @@ def intersection(G, H): return nx.intersection_all([G, H]) -@nx._dispatchable(graphs=_G_H) +@nx._dispatchable(graphs=_G_H, returns_graph=True) def difference(G, H): """Returns a new graph that contains the edges that exist in G but not in H. @@ -225,7 +225,7 @@ def difference(G, H): return R -@nx._dispatchable(graphs=_G_H) +@nx._dispatchable(graphs=_G_H, returns_graph=True) def symmetric_difference(G, H): """Returns new graph with edges that exist in either G or H but not both. @@ -288,7 +288,7 @@ def symmetric_difference(G, H): return R -@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True) +@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True, returns_graph=True) def compose(G, H): """Compose graph G with H by combining nodes and edges into a single graph. 
@@ -369,7 +369,7 @@ def compose(G, H): return nx.compose_all([G, H]) -@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True) +@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True, returns_graph=True) def full_join(G, H, rename=(None, None)): """Returns the full join of graphs G and H. diff --git a/networkx/algorithms/operators/product.py b/networkx/algorithms/operators/product.py index 07bebdaa4b4b..dc3427004807 100644 --- a/networkx/algorithms/operators/product.py +++ b/networkx/algorithms/operators/product.py @@ -124,7 +124,7 @@ def _init_product_graph(G, H): return GH -@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True) +@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True, returns_graph=True) def tensor_product(G, H): r"""Returns the tensor product of G and H. @@ -180,7 +180,7 @@ def tensor_product(G, H): return GH -@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True) +@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True, returns_graph=True) def cartesian_product(G, H): r"""Returns the Cartesian product of G and H. @@ -232,7 +232,7 @@ def cartesian_product(G, H): return GH -@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True) +@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True, returns_graph=True) def lexicographic_product(G, H): r"""Returns the lexicographic product of G and H. @@ -285,7 +285,7 @@ def lexicographic_product(G, H): return GH -@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True) +@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True, returns_graph=True) def strong_product(G, H): r"""Returns the strong product of G and H. @@ -343,7 +343,7 @@ def strong_product(G, H): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def power(G, k): """Returns the specified power of a graph. 
@@ -432,7 +432,7 @@ def power(G, k): @not_implemented_for("multigraph") -@nx._dispatchable(graphs=_G_H) +@nx._dispatchable(graphs=_G_H, returns_graph=True) def rooted_product(G, H, root): """Return the rooted product of graphs G and H rooted at root in H. @@ -472,7 +472,7 @@ def rooted_product(G, H, root): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatchable(graphs=_G_H) +@nx._dispatchable(graphs=_G_H, returns_graph=True) def corona_product(G, H): r"""Returns the Corona product of G and H. @@ -535,7 +535,9 @@ def corona_product(G, H): return GH -@nx._dispatchable(graphs=_G_H, preserve_edge_attrs=True, preserve_node_attrs=True) +@nx._dispatchable( + graphs=_G_H, preserve_edge_attrs=True, preserve_node_attrs=True, returns_graph=True +) def modular_product(G, H): r"""Returns the Modular product of G and H. diff --git a/networkx/algorithms/operators/unary.py b/networkx/algorithms/operators/unary.py index 9b1480a80127..64be249f634b 100644 --- a/networkx/algorithms/operators/unary.py +++ b/networkx/algorithms/operators/unary.py @@ -4,7 +4,7 @@ __all__ = ["complement", "reverse"] -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def complement(G): """Returns the graph complement of G. @@ -40,7 +40,7 @@ def complement(G): return R -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def reverse(G, copy=True): """Returns the reverse directed graph of G. diff --git a/networkx/algorithms/planarity.py b/networkx/algorithms/planarity.py index db494481cd1f..cfbb36a5c5ec 100644 --- a/networkx/algorithms/planarity.py +++ b/networkx/algorithms/planarity.py @@ -38,7 +38,7 @@ def is_planar(G): return check_planarity(G, counterexample=False)[0] -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def check_planarity(G, counterexample=False): """Check if a graph is planar and return a counterexample or an embedding. 
@@ -114,7 +114,7 @@ def check_planarity(G, counterexample=False): return True, embedding -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def check_planarity_recursive(G, counterexample=False): """Recursive version of :meth:`check_planarity`.""" planarity_state = LRPlanarity(G) @@ -130,7 +130,7 @@ def check_planarity_recursive(G, counterexample=False): return True, embedding -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def get_counterexample(G): """Obtains a Kuratowski subgraph. @@ -169,7 +169,7 @@ def get_counterexample(G): return subgraph -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def get_counterexample_recursive(G): """Recursive version of :meth:`get_counterexample`.""" diff --git a/networkx/algorithms/regular.py b/networkx/algorithms/regular.py index 01027cd8677e..058ad3654577 100644 --- a/networkx/algorithms/regular.py +++ b/networkx/algorithms/regular.py @@ -71,7 +71,7 @@ def is_k_regular(G, k): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatchable(preserve_edge_attrs=True) +@nx._dispatchable(preserve_edge_attrs=True, returns_graph=True) def k_factor(G, k, matching_weight="weight"): """Compute a k-factor of G diff --git a/networkx/algorithms/smallworld.py b/networkx/algorithms/smallworld.py index 1c89e243e7d8..8ddf74887f24 100644 --- a/networkx/algorithms/smallworld.py +++ b/networkx/algorithms/smallworld.py @@ -23,7 +23,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") @py_random_state(3) -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def random_reference(G, niter=1, connectivity=True, seed=None): """Compute a random graph by swapping edges of a given graph. 
@@ -121,7 +121,7 @@ def random_reference(G, niter=1, connectivity=True, seed=None): @not_implemented_for("directed") @not_implemented_for("multigraph") @py_random_state(4) -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def lattice_reference(G, niter=5, D=None, connectivity=True, seed=None): """Latticize the given graph by swapping edges. diff --git a/networkx/algorithms/sparsifiers.py b/networkx/algorithms/sparsifiers.py index fedc16cdd2d9..870b7ba6f37d 100644 --- a/networkx/algorithms/sparsifiers.py +++ b/networkx/algorithms/sparsifiers.py @@ -10,7 +10,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") @py_random_state(3) -@nx._dispatchable(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight", returns_graph=True) def spanner(G, stretch, weight=None, seed=None): """Returns a spanner of the given graph with the given stretch. diff --git a/networkx/algorithms/summarization.py b/networkx/algorithms/summarization.py index bdabbaaf039c..794a77ab34a6 100644 --- a/networkx/algorithms/summarization.py +++ b/networkx/algorithms/summarization.py @@ -65,7 +65,7 @@ __all__ = ["dedensify", "snap_aggregation"] -@nx._dispatchable +@nx._dispatchable(mutates_input={"not copy": 3}, returns_graph=True) def dedensify(G, threshold, prefix=None, copy=True): """Compresses neighborhoods around high-degree nodes @@ -404,7 +404,9 @@ def _snap_split(groups, neighbor_info, group_lookup, group_id): return groups -@nx._dispatchable(node_attrs="[node_attributes]", edge_attrs="[edge_attributes]") +@nx._dispatchable( + node_attrs="[node_attributes]", edge_attrs="[edge_attributes]", returns_graph=True +) def snap_aggregation( G, node_attributes, diff --git a/networkx/algorithms/swap.py b/networkx/algorithms/swap.py index 6c4b698f1ad1..c190f970b6f3 100644 --- a/networkx/algorithms/swap.py +++ b/networkx/algorithms/swap.py @@ -11,7 +11,7 @@ @nx.utils.not_implemented_for("undirected") @py_random_state(3) -@nx._dispatchable +@nx._dispatchable(mutates_input=True, 
returns_graph=True) def directed_edge_swap(G, *, nswap=1, max_tries=100, seed=None): """Swap three edges in a directed graph while keeping the node degrees fixed. @@ -133,7 +133,7 @@ def directed_edge_swap(G, *, nswap=1, max_tries=100, seed=None): @py_random_state(3) -@nx._dispatchable +@nx._dispatchable(mutates_input=True, returns_graph=True) def double_edge_swap(G, nswap=1, max_tries=100, seed=None): """Swap two edges in the graph while keeping the node degrees fixed. @@ -231,7 +231,7 @@ def double_edge_swap(G, nswap=1, max_tries=100, seed=None): @py_random_state(3) -@nx._dispatchable +@nx._dispatchable(mutates_input=True) def connected_double_edge_swap(G, nswap=1, _window_threshold=3, seed=None): """Attempts the specified number of double-edge swaps in the graph `G`. diff --git a/networkx/algorithms/threshold.py b/networkx/algorithms/threshold.py index 374e9693ecf2..bcc03d106212 100644 --- a/networkx/algorithms/threshold.py +++ b/networkx/algorithms/threshold.py @@ -301,7 +301,7 @@ def weights_to_creation_sequence( # Manipulating NetworkX.Graphs in context of threshold graphs -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def threshold_graph(creation_sequence, create_using=None): """ Create a threshold graph from the creation sequence or compact @@ -369,7 +369,7 @@ def find_alternating_4_cycle(G): return False -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def find_threshold_graph(G, create_using=None): """ Returns a threshold subgraph that is close to largest in `G`. diff --git a/networkx/algorithms/tournament.py b/networkx/algorithms/tournament.py index 3cf655f40ff8..43a71faa70ef 100644 --- a/networkx/algorithms/tournament.py +++ b/networkx/algorithms/tournament.py @@ -151,7 +151,7 @@ def hamiltonian_path(G): @py_random_state(1) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_tournament(n, seed=None): r"""Returns a random tournament graph on `n` nodes. 
diff --git a/networkx/algorithms/traversal/breadth_first_search.py b/networkx/algorithms/traversal/breadth_first_search.py index d7aba232dde2..b87cca124a67 100644 --- a/networkx/algorithms/traversal/breadth_first_search.py +++ b/networkx/algorithms/traversal/breadth_first_search.py @@ -218,7 +218,7 @@ def bfs_edges(G, source, reverse=False, depth_limit=None, sort_neighbors=None): yield from generic_bfs_edges(G, source, successors, depth_limit) -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def bfs_tree(G, source, reverse=False, depth_limit=None, sort_neighbors=None): """Returns an oriented tree constructed from of a breadth-first-search starting at source. diff --git a/networkx/algorithms/traversal/depth_first_search.py b/networkx/algorithms/traversal/depth_first_search.py index 8a06a96c7acd..3ca0f91d9649 100644 --- a/networkx/algorithms/traversal/depth_first_search.py +++ b/networkx/algorithms/traversal/depth_first_search.py @@ -111,7 +111,7 @@ def dfs_edges(G, source=None, depth_limit=None, *, sort_neighbors=None): depth_now -= 1 -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def dfs_tree(G, source=None, depth_limit=None, *, sort_neighbors=None): """Returns oriented tree constructed from a depth-first-search from source. diff --git a/networkx/algorithms/tree/branchings.py b/networkx/algorithms/tree/branchings.py index e6c84051d8be..34593ea4100e 100644 --- a/networkx/algorithms/tree/branchings.py +++ b/networkx/algorithms/tree/branchings.py @@ -107,7 +107,7 @@ def branching_weight(G, attr="weight", default=1): @py_random_state(4) -@nx._dispatchable(edge_attrs={"attr": "default"}) +@nx._dispatchable(edge_attrs={"attr": "default"}, returns_graph=True) def greedy_branching(G, attr="weight", default=1, kind="max", seed=None): """ Returns a branching obtained through a greedy algorithm. 
@@ -746,6 +746,7 @@ def is_root(G, u, edgekeys): @nx._dispatchable( edge_attrs={"attr": "default", "partition": 0}, preserve_edge_attrs="preserve_attrs", + returns_graph=True, ) def maximum_branching( G, @@ -1174,6 +1175,7 @@ def is_root(G, u, edgekeys): @nx._dispatchable( edge_attrs={"attr": "default", "partition": None}, preserve_edge_attrs="preserve_attrs", + returns_graph=True, ) def minimum_branching( G, attr="weight", default=1, preserve_attrs=False, partition=None @@ -1195,6 +1197,7 @@ def minimum_branching( @nx._dispatchable( edge_attrs={"attr": "default", "partition": None}, preserve_edge_attrs="preserve_attrs", + returns_graph=True, ) def minimal_branching( G, /, *, attr="weight", default=1, preserve_attrs=False, partition=None @@ -1259,6 +1262,7 @@ def minimal_branching( @nx._dispatchable( edge_attrs={"attr": "default", "partition": None}, preserve_edge_attrs="preserve_attrs", + returns_graph=True, ) def maximum_spanning_arborescence( G, attr="weight", default=1, preserve_attrs=False, partition=None @@ -1301,6 +1305,7 @@ def maximum_spanning_arborescence( @nx._dispatchable( edge_attrs={"attr": "default", "partition": None}, preserve_edge_attrs="preserve_attrs", + returns_graph=True, ) def minimum_spanning_arborescence( G, attr="weight", default=1, preserve_attrs=False, partition=None diff --git a/networkx/algorithms/tree/coding.py b/networkx/algorithms/tree/coding.py index 55344dbcbcff..8cec023c2286 100644 --- a/networkx/algorithms/tree/coding.py +++ b/networkx/algorithms/tree/coding.py @@ -128,7 +128,7 @@ def _make_tuple(T, root, _parent): return _make_tuple(T, root, None) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_nested_tuple(sequence, sensible_relabeling=False): """Returns the rooted tree corresponding to the given nested tuple. 
@@ -314,7 +314,7 @@ def parents(u): return result -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_prufer_sequence(sequence): r"""Returns the tree corresponding to the given Prüfer sequence. diff --git a/networkx/algorithms/tree/decomposition.py b/networkx/algorithms/tree/decomposition.py index a649288ffe8a..c8b8f2477b47 100644 --- a/networkx/algorithms/tree/decomposition.py +++ b/networkx/algorithms/tree/decomposition.py @@ -10,7 +10,7 @@ @not_implemented_for("multigraph") -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def junction_tree(G): r"""Returns a junction tree of a given graph. diff --git a/networkx/algorithms/tree/mst.py b/networkx/algorithms/tree/mst.py index 8b76bc470aba..850536edf6cd 100644 --- a/networkx/algorithms/tree/mst.py +++ b/networkx/algorithms/tree/mst.py @@ -555,7 +555,7 @@ def maximum_spanning_edges( ) -@nx._dispatchable(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def minimum_spanning_tree(G, weight="weight", algorithm="kruskal", ignore_nan=False): """Returns a minimum spanning tree or forest on an undirected graph `G`. @@ -615,7 +615,7 @@ def minimum_spanning_tree(G, weight="weight", algorithm="kruskal", ignore_nan=Fa return T -@nx._dispatchable(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def partition_spanning_tree( G, minimum=True, weight="weight", partition="partition", ignore_nan=False ): @@ -679,7 +679,7 @@ def partition_spanning_tree( return T -@nx._dispatchable(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def maximum_spanning_tree(G, weight="weight", algorithm="kruskal", ignore_nan=False): """Returns a maximum spanning tree or forest on an undirected graph `G`. 
@@ -743,7 +743,7 @@ def maximum_spanning_tree(G, weight="weight", algorithm="kruskal", ignore_nan=Fa @py_random_state(3) -@nx._dispatchable(preserve_edge_attrs=True) +@nx._dispatchable(preserve_edge_attrs=True, returns_graph=True) def random_spanning_tree(G, weight=None, *, multiplicative=True, seed=None): """ Sample a random spanning tree using the edges weights of `G`. diff --git a/networkx/algorithms/tree/operations.py b/networkx/algorithms/tree/operations.py index 14b7c232bcfb..f4368d6a322a 100644 --- a/networkx/algorithms/tree/operations.py +++ b/networkx/algorithms/tree/operations.py @@ -32,7 +32,7 @@ def join(rooted_trees, label_attribute=None): # Argument types don't match dispatching, but allow manual selection of backend -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def join_trees(rooted_trees, *, label_attribute=None, first_label=0): """Returns a new rooted tree made by joining `rooted_trees` diff --git a/networkx/algorithms/triads.py b/networkx/algorithms/triads.py index 74b2f51b9dbc..1e67c145362b 100644 --- a/networkx/algorithms/triads.py +++ b/networkx/algorithms/triads.py @@ -354,7 +354,7 @@ def all_triplets(G): @not_implemented_for("undirected") -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def all_triads(G): """A generator of all possible triads in G. @@ -547,7 +547,7 @@ def triad_type(G): @not_implemented_for("undirected") @py_random_state(1) -@nx._dispatchable(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def random_triad(G, seed=None): """Returns a random triad from a directed graph. 
diff --git a/networkx/convert.py b/networkx/convert.py index af2847aef0c8..7cc8fe401261 100644 --- a/networkx/convert.py +++ b/networkx/convert.py @@ -202,7 +202,7 @@ def to_dict_of_lists(G, nodelist=None): return d -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_dict_of_lists(d, create_using=None): """Returns a graph from a dictionary of lists. @@ -362,7 +362,7 @@ def to_dict_of_dicts(G, nodelist=None, edge_data=None): return dod -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_dict_of_dicts(d, create_using=None, multigraph_input=False): """Returns a graph from a dictionary of dictionaries. @@ -467,7 +467,7 @@ def to_edgelist(G, nodelist=None): return G.edges(nodelist, data=True) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_edgelist(edgelist, create_using=None): """Returns a graph from a list of edges. diff --git a/networkx/convert_matrix.py b/networkx/convert_matrix.py index 3e9ed1f8b468..f669a09ca5d2 100644 --- a/networkx/convert_matrix.py +++ b/networkx/convert_matrix.py @@ -150,7 +150,7 @@ def to_pandas_adjacency( return pd.DataFrame(data=M, index=nodelist, columns=nodelist) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_pandas_adjacency(df, create_using=None): r"""Returns a graph from Pandas DataFrame. 
@@ -311,7 +311,7 @@ def to_pandas_edgelist( return pd.DataFrame(edgelistdict, dtype=dtype) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_pandas_edgelist( df, source="source", @@ -675,7 +675,7 @@ def _generate_weighted_edges(A): return _coo_gen_triples(A.tocoo()) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_scipy_sparse_array( A, parallel_edges=False, create_using=None, edge_attribute="weight" ): @@ -1017,7 +1017,7 @@ def to_numpy_array( return A -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_numpy_array(A, parallel_edges=False, create_using=None, edge_attr="weight"): """Returns a graph from a 2D NumPy array. diff --git a/networkx/drawing/nx_agraph.py b/networkx/drawing/nx_agraph.py index 1682c62dcf16..f91031fcae6a 100644 --- a/networkx/drawing/nx_agraph.py +++ b/networkx/drawing/nx_agraph.py @@ -33,7 +33,7 @@ ] -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_agraph(A, create_using=None): """Returns a NetworkX Graph or DiGraph from a PyGraphviz graph. @@ -203,7 +203,7 @@ def write_dot(G, path): return -@nx._dispatchable(name="agraph_read_dot", graphs=None) +@nx._dispatchable(name="agraph_read_dot", graphs=None, returns_graph=True) def read_dot(path): """Returns a NetworkX graph from a dot file on path. diff --git a/networkx/drawing/nx_pydot.py b/networkx/drawing/nx_pydot.py index 77ae72788634..92c5f333e1ce 100644 --- a/networkx/drawing/nx_pydot.py +++ b/networkx/drawing/nx_pydot.py @@ -46,7 +46,7 @@ def write_dot(G, path): @open_file(0, mode="r") -@nx._dispatchable(name="pydot_read_dot", graphs=None) +@nx._dispatchable(name="pydot_read_dot", graphs=None, returns_graph=True) def read_dot(path): """Returns a NetworkX :class:`MultiGraph` or :class:`MultiDiGraph` from the dot file with the passed path. 
@@ -80,7 +80,7 @@ def read_dot(path): return from_pydot(P_list[0]) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_pydot(P): """Returns a NetworkX graph from a Pydot graph. diff --git a/networkx/generators/atlas.py b/networkx/generators/atlas.py index 0a34bc466b8f..262443628c8f 100644 --- a/networkx/generators/atlas.py +++ b/networkx/generators/atlas.py @@ -88,7 +88,7 @@ def _generate_graphs(): yield G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def graph_atlas(i): """Returns graph number `i` from the Graph Atlas. @@ -127,7 +127,7 @@ def graph_atlas(i): return next(islice(_generate_graphs(), i, None)) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def graph_atlas_g(): """Returns the list of all graphs with up to seven nodes named in the Graph Atlas. diff --git a/networkx/generators/classic.py b/networkx/generators/classic.py index 51ff86c1da32..54088be7519f 100644 --- a/networkx/generators/classic.py +++ b/networkx/generators/classic.py @@ -66,7 +66,7 @@ def _tree_edges(n, r): break -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def full_rary_tree(r, n, create_using=None): """Creates a full r-ary tree of `n` nodes. @@ -104,7 +104,7 @@ def full_rary_tree(r, n, create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def kneser_graph(n, k): """Returns the Kneser Graph with parameters `n` and `k`. @@ -151,7 +151,7 @@ def kneser_graph(n, k): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def balanced_tree(r, h, create_using=None): """Returns the perfectly balanced `r`-ary tree of height `h`. 
@@ -201,7 +201,7 @@ def balanced_tree(r, h, create_using=None): return full_rary_tree(r, n, create_using=create_using) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def barbell_graph(m1, m2, create_using=None): """Returns the Barbell Graph: two complete graphs connected by a path. @@ -274,7 +274,7 @@ def barbell_graph(m1, m2, create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def binomial_tree(n, create_using=None): """Returns the Binomial Tree of order n. @@ -312,7 +312,7 @@ def binomial_tree(n, create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) @nodes_or_number(0) def complete_graph(n, create_using=None): """Return the complete graph `K_n` with n nodes. @@ -360,7 +360,7 @@ def complete_graph(n, create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def circular_ladder_graph(n, create_using=None): """Returns the circular ladder graph $CL_n$ of length n. @@ -380,7 +380,7 @@ def circular_ladder_graph(n, create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def circulant_graph(n, offsets, create_using=None): r"""Returns the circulant graph $Ci_n(x_1, x_2, ..., x_m)$ with $n$ nodes. @@ -455,7 +455,7 @@ def circulant_graph(n, offsets, create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) @nodes_or_number(0) def cycle_graph(n, create_using=None): """Returns the cycle graph $C_n$ of cyclically connected nodes. @@ -487,7 +487,7 @@ def cycle_graph(n, create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def dorogovtsev_goltsev_mendes_graph(n, create_using=None): """Returns the hierarchically constructed Dorogovtsev-Goltsev-Mendes graph. 
@@ -549,7 +549,7 @@ def dorogovtsev_goltsev_mendes_graph(n, create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) @nodes_or_number(0) def empty_graph(n=0, create_using=None, default=Graph): """Returns the empty graph with n nodes and zero edges. @@ -648,7 +648,7 @@ def empty_graph(n=0, create_using=None, default=Graph): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def ladder_graph(n, create_using=None): """Returns the Ladder graph of length n. @@ -671,7 +671,7 @@ def ladder_graph(n, create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) @nodes_or_number([0, 1]) def lollipop_graph(m, n, create_using=None): """Returns the Lollipop Graph; ``K_m`` connected to ``P_n``. @@ -738,7 +738,7 @@ def lollipop_graph(m, n, create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def null_graph(create_using=None): """Returns the Null graph with no nodes or edges. @@ -749,7 +749,7 @@ def null_graph(create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) @nodes_or_number(0) def path_graph(n, create_using=None): """Returns the Path graph `P_n` of linearly connected nodes. @@ -775,7 +775,7 @@ def path_graph(n, create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) @nodes_or_number(0) def star_graph(n, create_using=None): """Return the star graph @@ -814,7 +814,7 @@ def star_graph(n, create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) @nodes_or_number([0, 1]) def tadpole_graph(m, n, create_using=None): """Returns the (m,n)-tadpole graph; ``C_m`` connected to ``P_n``. 
@@ -876,7 +876,7 @@ def tadpole_graph(m, n, create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def trivial_graph(create_using=None): """Return the Trivial graph with one node (with label 0) and no edges. @@ -889,7 +889,7 @@ def trivial_graph(create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def turan_graph(n, r): r"""Return the Turan Graph @@ -927,7 +927,7 @@ def turan_graph(n, r): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) @nodes_or_number(0) def wheel_graph(n, create_using=None): """Return the wheel graph @@ -963,7 +963,7 @@ def wheel_graph(n, create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def complete_multipartite_graph(*subset_sizes): """Returns the complete multipartite graph with the specified subset sizes. diff --git a/networkx/generators/cographs.py b/networkx/generators/cographs.py index 50f38ed76bf0..388a42ac5296 100644 --- a/networkx/generators/cographs.py +++ b/networkx/generators/cographs.py @@ -18,7 +18,7 @@ @py_random_state(1) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_cograph(n, seed=None): r"""Returns a random cograph with $2 ^ n$ nodes. diff --git a/networkx/generators/community.py b/networkx/generators/community.py index 5a3d31c6833e..5076d728840f 100644 --- a/networkx/generators/community.py +++ b/networkx/generators/community.py @@ -19,7 +19,7 @@ ] -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def caveman_graph(l, k): """Returns a caveman graph of `l` cliques of size `k`. @@ -66,7 +66,7 @@ def caveman_graph(l, k): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def connected_caveman_graph(l, k): """Returns a connected caveman graph of `l` cliques of size `k`. 
@@ -121,7 +121,7 @@ def connected_caveman_graph(l, k): @py_random_state(3) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def relaxed_caveman_graph(l, k, p, seed=None): """Returns a relaxed caveman graph. @@ -173,7 +173,7 @@ def relaxed_caveman_graph(l, k, p, seed=None): @py_random_state(3) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_partition_graph(sizes, p_in, p_out, seed=None, directed=False): """Returns the random partition graph with a partition of sizes. @@ -252,7 +252,7 @@ def random_partition_graph(sizes, p_in, p_out, seed=None, directed=False): @py_random_state(4) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def planted_partition_graph(l, k, p_in, p_out, seed=None, directed=False): """Returns the planted l-partition graph. @@ -308,7 +308,7 @@ def planted_partition_graph(l, k, p_in, p_out, seed=None, directed=False): @py_random_state(6) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def gaussian_random_partition_graph(n, s, v, p_in, p_out, directed=False, seed=None): """Generate a Gaussian random partition graph. @@ -384,7 +384,7 @@ def gaussian_random_partition_graph(n, s, v, p_in, p_out, directed=False, seed=N return random_partition_graph(sizes, p_in, p_out, seed=seed, directed=directed) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def ring_of_cliques(num_cliques, clique_size): """Defines a "ring of cliques" graph. @@ -440,7 +440,7 @@ def ring_of_cliques(num_cliques, clique_size): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def windmill_graph(n, k): """Generate a windmill graph. 
A windmill graph is a graph of `n` cliques each of size `k` that are all @@ -494,7 +494,7 @@ def windmill_graph(n, k): @py_random_state(3) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def stochastic_block_model( sizes, p, nodelist=None, seed=None, directed=False, selfloops=False, sparse=True ): @@ -806,7 +806,7 @@ def _generate_communities(degree_seq, community_sizes, mu, max_iters, seed): @py_random_state(11) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def LFR_benchmark_graph( n, tau1, diff --git a/networkx/generators/degree_seq.py b/networkx/generators/degree_seq.py index 0e9414dd8813..6bc85e2be616 100644 --- a/networkx/generators/degree_seq.py +++ b/networkx/generators/degree_seq.py @@ -124,7 +124,7 @@ def _configuration_model( @py_random_state(2) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def configuration_model(deg_sequence, create_using=None, seed=None): """Returns a random graph with the given degree sequence. @@ -227,7 +227,7 @@ def configuration_model(deg_sequence, create_using=None, seed=None): @py_random_state(3) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def directed_configuration_model( in_degree_sequence, out_degree_sequence, create_using=None, seed=None ): @@ -330,7 +330,7 @@ def directed_configuration_model( @py_random_state(1) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def expected_degree_graph(w, seed=None, selfloops=True): r"""Returns a random graph with given expected degrees. @@ -439,7 +439,7 @@ def expected_degree_graph(w, seed=None, selfloops=True): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def havel_hakimi_graph(deg_sequence, create_using=None): """Returns a simple graph with given degree sequence constructed using the Havel-Hakimi algorithm. 
@@ -532,7 +532,7 @@ def havel_hakimi_graph(deg_sequence, create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def directed_havel_hakimi_graph(in_deg_sequence, out_deg_sequence, create_using=None): """Returns a directed graph with the given degree sequences. @@ -644,7 +644,7 @@ def directed_havel_hakimi_graph(in_deg_sequence, out_deg_sequence, create_using= return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def degree_sequence_tree(deg_sequence, create_using=None): """Make a tree for the given degree sequence. @@ -691,7 +691,7 @@ def degree_sequence_tree(deg_sequence, create_using=None): @py_random_state(1) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_degree_sequence_graph(sequence, seed=None, tries=10): r"""Returns a simple random graph with the given degree sequence. diff --git a/networkx/generators/directed.py b/networkx/generators/directed.py index 38ddc80f8995..4548726b9fe2 100644 --- a/networkx/generators/directed.py +++ b/networkx/generators/directed.py @@ -21,7 +21,7 @@ @py_random_state(3) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def gn_graph(n, kernel=None, create_using=None, seed=None): """Returns the growing network (GN) digraph with `n` nodes. @@ -89,7 +89,7 @@ def kernel(x): @py_random_state(3) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def gnr_graph(n, p, create_using=None, seed=None): """Returns the growing network with redirection (GNR) digraph with `n` nodes and redirection probability `p`. @@ -143,7 +143,7 @@ def gnr_graph(n, p, create_using=None, seed=None): @py_random_state(2) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def gnc_graph(n, create_using=None, seed=None): """Returns the growing network with copying (GNC) digraph with `n` nodes. 
@@ -183,7 +183,7 @@ def gnc_graph(n, create_using=None, seed=None): @py_random_state(6) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def scale_free_graph( n, alpha=0.41, @@ -334,7 +334,7 @@ def _choose_node(candidates, node_list, delta): @py_random_state(4) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_uniform_k_out_graph(n, k, self_loops=True, with_replacement=True, seed=None): """Returns a random `k`-out graph with uniform attachment. @@ -415,7 +415,7 @@ def sample(v, nodes): @py_random_state(4) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_k_out_graph(n, k, alpha, self_loops=True, seed=None): """Returns a random `k`-out graph with preferential attachment. diff --git a/networkx/generators/duplication.py b/networkx/generators/duplication.py index 6a2e5d57e079..ad8262382ce3 100644 --- a/networkx/generators/duplication.py +++ b/networkx/generators/duplication.py @@ -13,7 +13,7 @@ @py_random_state(4) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def partial_duplication_graph(N, n, p, q, seed=None): """Returns a random graph using the partial duplication model. @@ -88,7 +88,7 @@ def partial_duplication_graph(N, n, p, q, seed=None): @py_random_state(2) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def duplication_divergence_graph(n, p, seed=None): """Returns an undirected graph using the duplication-divergence model. 
diff --git a/networkx/generators/ego.py b/networkx/generators/ego.py index 9803783ae1e7..d959a1bd1d02 100644 --- a/networkx/generators/ego.py +++ b/networkx/generators/ego.py @@ -6,7 +6,7 @@ import networkx as nx -@nx._dispatchable(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def ego_graph(G, n, radius=1, center=True, undirected=False, distance=None): """Returns induced subgraph of neighbors centered at node n within a given radius. diff --git a/networkx/generators/expanders.py b/networkx/generators/expanders.py index cc03500a4702..85716e0e7a05 100644 --- a/networkx/generators/expanders.py +++ b/networkx/generators/expanders.py @@ -47,7 +47,7 @@ # (x, (y + (2*x + 1)) % n), # (x, (y + (2*x + 2)) % n), # -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def margulis_gabber_galil_graph(n, create_using=None): r"""Returns the Margulis-Gabber-Galil undirected MultiGraph on `n^2` nodes. @@ -90,7 +90,7 @@ def margulis_gabber_galil_graph(n, create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def chordal_cycle_graph(p, create_using=None): """Returns the chordal cycle graph on `p` nodes. @@ -154,7 +154,7 @@ def chordal_cycle_graph(p, create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def paley_graph(p, create_using=None): r"""Returns the Paley $\frac{(p-1)}{2}$ -regular graph on $p$ nodes. @@ -214,7 +214,7 @@ def paley_graph(p, create_using=None): @nx.utils.decorators.np_random_state("seed") -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def maybe_regular_expander(n, d, *, create_using=None, max_tries=100, seed=None): r"""Utility for creating a random regular expander. 
@@ -399,7 +399,7 @@ def is_regular_expander(G, *, epsilon=0): @nx.utils.decorators.np_random_state("seed") -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_regular_expander_graph( n, d, *, epsilon=0, create_using=None, max_tries=100, seed=None ): diff --git a/networkx/generators/geometric.py b/networkx/generators/geometric.py index 468b2d36c8b3..bb7fbd76d998 100644 --- a/networkx/generators/geometric.py +++ b/networkx/generators/geometric.py @@ -113,7 +113,7 @@ def _geometric_edges(G, radius, p, pos_name): @py_random_state(5) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_geometric_graph( n, radius, dim=2, pos=None, p=2, seed=None, *, pos_name="pos" ): @@ -207,7 +207,7 @@ def random_geometric_graph( @py_random_state(6) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def soft_random_geometric_graph( n, radius, dim=2, pos=None, p=2, p_dist=None, seed=None, *, pos_name="pos" ): @@ -337,7 +337,7 @@ def should_join(edge): @py_random_state(7) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def geographical_threshold_graph( n, theta, @@ -504,7 +504,7 @@ def should_join(pair): @py_random_state(6) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def waxman_graph( n, beta=0.4, @@ -637,7 +637,7 @@ def should_join(pair): @py_random_state(5) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def navigable_small_world_graph(n, p=1, q=1, r=2, dim=2, seed=None): r"""Returns a navigable small-world graph. 
@@ -710,7 +710,7 @@ def navigable_small_world_graph(n, p=1, q=1, r=2, dim=2, seed=None): @py_random_state(7) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def thresholded_random_geometric_graph( n, radius, @@ -850,7 +850,7 @@ def thresholded_random_geometric_graph( @py_random_state(5) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def geometric_soft_configuration_graph( *, beta, n=None, gamma=None, mean_degree=None, kappas=None, seed=None ): diff --git a/networkx/generators/harary_graph.py b/networkx/generators/harary_graph.py index 250f98985d63..591587d3aca6 100644 --- a/networkx/generators/harary_graph.py +++ b/networkx/generators/harary_graph.py @@ -21,7 +21,7 @@ __all__ = ["hnm_harary_graph", "hkn_harary_graph"] -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def hnm_harary_graph(n, m, create_using=None): """Returns the Harary graph with given numbers of nodes and edges. @@ -113,7 +113,7 @@ def hnm_harary_graph(n, m, create_using=None): return H -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def hkn_harary_graph(k, n, create_using=None): """Returns the Harary graph with given node connectivity and node number. 
diff --git a/networkx/generators/internet_as_graphs.py b/networkx/generators/internet_as_graphs.py index ad45b23d93e6..449d54376af4 100644 --- a/networkx/generators/internet_as_graphs.py +++ b/networkx/generators/internet_as_graphs.py @@ -397,7 +397,7 @@ def generate(self): @py_random_state(1) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_internet_as_graph(n, seed=None): """Generates a random undirected graph resembling the Internet AS network diff --git a/networkx/generators/intersection.py b/networkx/generators/intersection.py index be88448f0667..2ed3a5fa3107 100644 --- a/networkx/generators/intersection.py +++ b/networkx/generators/intersection.py @@ -12,7 +12,7 @@ @py_random_state(3) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def uniform_random_intersection_graph(n, m, p, seed=None): """Returns a uniform random intersection graph. @@ -48,7 +48,7 @@ def uniform_random_intersection_graph(n, m, p, seed=None): @py_random_state(3) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def k_random_intersection_graph(n, m, k, seed=None): """Returns a intersection graph with randomly chosen attribute sets for each node that are of equal size (k). @@ -84,7 +84,7 @@ def k_random_intersection_graph(n, m, k, seed=None): @py_random_state(3) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def general_random_intersection_graph(n, m, p, seed=None): """Returns a random intersection graph with independent probabilities for connections between node and attribute sets. 
diff --git a/networkx/generators/interval_graph.py b/networkx/generators/interval_graph.py index 6373a24b34c6..2a3d760901c7 100644 --- a/networkx/generators/interval_graph.py +++ b/networkx/generators/interval_graph.py @@ -8,7 +8,7 @@ __all__ = ["interval_graph"] -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def interval_graph(intervals): """Generates an interval graph for a list of intervals given. diff --git a/networkx/generators/joint_degree_seq.py b/networkx/generators/joint_degree_seq.py index 7ad03288c865..c426df944ad2 100644 --- a/networkx/generators/joint_degree_seq.py +++ b/networkx/generators/joint_degree_seq.py @@ -142,7 +142,7 @@ def _neighbor_switch(G, w, unsat, h_node_residual, avoid_node_id=None): @py_random_state(1) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def joint_degree_graph(joint_degrees, seed=None): """Generates a random simple graph with the given joint degree dictionary. @@ -469,7 +469,7 @@ def _directed_neighbor_switch_rev( @py_random_state(3) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def directed_joint_degree_graph(in_degrees, out_degrees, nkk, seed=None): """Generates a random simple directed graph with the joint degree. diff --git a/networkx/generators/lattice.py b/networkx/generators/lattice.py index c046b8e9b118..95e520d2ce1f 100644 --- a/networkx/generators/lattice.py +++ b/networkx/generators/lattice.py @@ -32,7 +32,7 @@ ] -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) @nodes_or_number([0, 1]) def grid_2d_graph(m, n, periodic=False, create_using=None): """Returns the two-dimensional grid graph. @@ -86,7 +86,7 @@ def grid_2d_graph(m, n, periodic=False, create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def grid_graph(dim, periodic=False): """Returns the *n*-dimensional grid graph. 
@@ -143,7 +143,7 @@ def grid_graph(dim, periodic=False): return H -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def hypercube_graph(n): """Returns the *n*-dimensional hypercube graph. @@ -170,7 +170,7 @@ def hypercube_graph(n): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def triangular_lattice_graph( m, n, periodic=False, with_positions=True, create_using=None ): @@ -271,7 +271,7 @@ def triangular_lattice_graph( return H -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def hexagonal_lattice_graph( m, n, periodic=False, with_positions=True, create_using=None ): diff --git a/networkx/generators/line.py b/networkx/generators/line.py index b5974a038ebc..57f4d16927dd 100644 --- a/networkx/generators/line.py +++ b/networkx/generators/line.py @@ -10,7 +10,7 @@ __all__ = ["line_graph", "inverse_line_graph"] -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def line_graph(G, create_using=None): r"""Returns the line graph of the graph or digraph `G`. @@ -215,7 +215,7 @@ def _lg_undirected(G, selfloops=False, create_using=None): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def inverse_line_graph(G): """Returns the inverse line graph of graph G. 
diff --git a/networkx/generators/mycielski.py b/networkx/generators/mycielski.py index 492be58fd8ea..804b90369285 100644 --- a/networkx/generators/mycielski.py +++ b/networkx/generators/mycielski.py @@ -11,7 +11,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def mycielskian(G, iterations=1): r"""Returns the Mycielskian of a simple, undirected graph G @@ -68,7 +68,7 @@ def mycielskian(G, iterations=1): return M -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def mycielski_graph(n): """Generator for the n_th Mycielski Graph. diff --git a/networkx/generators/nonisomorphic_trees.py b/networkx/generators/nonisomorphic_trees.py index fbfcbd22e924..9d403cb848e1 100644 --- a/networkx/generators/nonisomorphic_trees.py +++ b/networkx/generators/nonisomorphic_trees.py @@ -12,7 +12,7 @@ import networkx as nx -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def nonisomorphic_trees(order, create="graph"): """Generates lists of nonisomorphic trees diff --git a/networkx/generators/random_clustered.py b/networkx/generators/random_clustered.py index abd7a8c9c6ea..edf4b94b3b93 100644 --- a/networkx/generators/random_clustered.py +++ b/networkx/generators/random_clustered.py @@ -7,7 +7,7 @@ @py_random_state(2) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_clustered_graph(joint_degree_sequence, create_using=None, seed=None): r"""Generate a random graph with the given joint independent edge degree and triangle degree sequence. 
diff --git a/networkx/generators/random_graphs.py b/networkx/generators/random_graphs.py index dc65a9621239..b6f5ebf629d9 100644 --- a/networkx/generators/random_graphs.py +++ b/networkx/generators/random_graphs.py @@ -37,7 +37,7 @@ @py_random_state(2) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def fast_gnp_random_graph(n, p, seed=None, directed=False): """Returns a $G_{n,p}$ random graph, also known as an Erdős-Rényi graph or a binomial graph. @@ -109,7 +109,7 @@ def fast_gnp_random_graph(n, p, seed=None, directed=False): @py_random_state(2) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def gnp_random_graph(n, p, seed=None, directed=False): """Returns a $G_{n,p}$ random graph, also known as an Erdős-Rényi graph or a binomial graph. @@ -174,7 +174,7 @@ def gnp_random_graph(n, p, seed=None, directed=False): @py_random_state(2) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def dense_gnm_random_graph(n, m, seed=None): """Returns a $G_{n,m}$ random graph. @@ -236,7 +236,7 @@ def dense_gnm_random_graph(n, m, seed=None): @py_random_state(2) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def gnm_random_graph(n, m, seed=None, directed=False): """Returns a $G_{n,m}$ random graph. @@ -292,7 +292,7 @@ def gnm_random_graph(n, m, seed=None, directed=False): @py_random_state(3) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def newman_watts_strogatz_graph(n, k, p, seed=None): """Returns a Newman–Watts–Strogatz small-world graph. @@ -363,7 +363,7 @@ def newman_watts_strogatz_graph(n, k, p, seed=None): @py_random_state(3) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def watts_strogatz_graph(n, k, p, seed=None): """Returns a Watts–Strogatz small-world graph. 
@@ -438,7 +438,7 @@ def watts_strogatz_graph(n, k, p, seed=None): @py_random_state(4) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def connected_watts_strogatz_graph(n, k, p, tries=100, seed=None): """Returns a connected Watts–Strogatz small-world graph. @@ -491,7 +491,7 @@ def connected_watts_strogatz_graph(n, k, p, tries=100, seed=None): @py_random_state(2) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_regular_graph(d, n, seed=None): r"""Returns a random $d$-regular graph on $n$ nodes. @@ -622,7 +622,7 @@ def _random_subset(seq, m, rng): @py_random_state(2) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def barabasi_albert_graph(n, m, seed=None, initial_graph=None): """Returns a random graph using Barabási–Albert preferential attachment @@ -695,7 +695,7 @@ def barabasi_albert_graph(n, m, seed=None, initial_graph=None): @py_random_state(4) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def dual_barabasi_albert_graph(n, m1, m2, p, seed=None, initial_graph=None): """Returns a random graph using dual Barabási–Albert preferential attachment @@ -795,7 +795,7 @@ def dual_barabasi_albert_graph(n, m1, m2, p, seed=None, initial_graph=None): @py_random_state(4) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def extended_barabasi_albert_graph(n, m, p, q, seed=None): """Returns an extended Barabási–Albert model graph. @@ -956,7 +956,7 @@ def extended_barabasi_albert_graph(n, m, p, q, seed=None): @py_random_state(3) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def powerlaw_cluster_graph(n, m, p, seed=None): """Holme and Kim algorithm for growing graphs with powerlaw degree distribution and approximate average clustering. 
@@ -1046,7 +1046,7 @@ def powerlaw_cluster_graph(n, m, p, seed=None): @py_random_state(3) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_lobster(n, p1, p2, seed=None): """Returns a random lobster graph. @@ -1097,7 +1097,7 @@ def random_lobster(n, p1, p2, seed=None): @py_random_state(1) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_shell_graph(constructor, seed=None): """Returns a random shell graph for the constructor given. @@ -1155,7 +1155,7 @@ def random_shell_graph(constructor, seed=None): @py_random_state(2) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_powerlaw_tree(n, gamma=3, seed=None, tries=100): """Returns a tree with a power law degree distribution. @@ -1249,7 +1249,7 @@ def random_powerlaw_tree_sequence(n, gamma=3, seed=None, tries=100): @py_random_state(3) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_kernel_graph(n, kernel_integral, kernel_root=None, seed=None): r"""Returns an random graph based on the specified kernel. diff --git a/networkx/generators/small.py b/networkx/generators/small.py index 0efa62bc5217..ea33e7419245 100644 --- a/networkx/generators/small.py +++ b/networkx/generators/small.py @@ -59,7 +59,7 @@ def wrapper(*args, **kwargs): return wrapper -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def LCF_graph(n, shift_list, repeats, create_using=None): """ Return the cubic graph specified in LCF notation. 
@@ -126,7 +126,7 @@ def LCF_graph(n, shift_list, repeats, create_using=None): @_raise_on_directed -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def bull_graph(create_using=None): """ Returns the Bull Graph @@ -160,7 +160,7 @@ def bull_graph(create_using=None): @_raise_on_directed -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def chvatal_graph(create_using=None): """ Returns the Chvátal Graph @@ -205,7 +205,7 @@ def chvatal_graph(create_using=None): @_raise_on_directed -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def cubical_graph(create_using=None): """ Returns the 3-regular Platonic Cubical Graph @@ -248,7 +248,7 @@ def cubical_graph(create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def desargues_graph(create_using=None): """ Returns the Desargues Graph @@ -279,7 +279,7 @@ def desargues_graph(create_using=None): @_raise_on_directed -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def diamond_graph(create_using=None): """ Returns the Diamond graph @@ -308,7 +308,7 @@ def diamond_graph(create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def dodecahedral_graph(create_using=None): """ Returns the Platonic Dodecahedral graph. @@ -339,7 +339,7 @@ def dodecahedral_graph(create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def frucht_graph(create_using=None): """ Returns the Frucht Graph. @@ -386,7 +386,7 @@ def frucht_graph(create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def heawood_graph(create_using=None): """ Returns the Heawood Graph, a (3,6) cage. 
@@ -420,7 +420,7 @@ def heawood_graph(create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def hoffman_singleton_graph(): """ Returns the Hoffman-Singleton Graph. @@ -464,7 +464,7 @@ def hoffman_singleton_graph(): @_raise_on_directed -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def house_graph(create_using=None): """ Returns the House graph (square with triangle on top) @@ -495,7 +495,7 @@ def house_graph(create_using=None): @_raise_on_directed -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def house_x_graph(create_using=None): """ Returns the House graph with a cross inside the house square. @@ -525,7 +525,7 @@ def house_x_graph(create_using=None): @_raise_on_directed -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def icosahedral_graph(create_using=None): """ Returns the Platonic Icosahedral graph. @@ -568,7 +568,7 @@ def icosahedral_graph(create_using=None): @_raise_on_directed -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def krackhardt_kite_graph(create_using=None): """ Returns the Krackhardt Kite Social Network. @@ -618,7 +618,7 @@ def krackhardt_kite_graph(create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def moebius_kantor_graph(create_using=None): """ Returns the Moebius-Kantor graph. @@ -648,7 +648,7 @@ def moebius_kantor_graph(create_using=None): @_raise_on_directed -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def octahedral_graph(create_using=None): """ Returns the Platonic Octahedral graph. @@ -683,7 +683,7 @@ def octahedral_graph(create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def pappus_graph(): """ Returns the Pappus graph. 
@@ -707,7 +707,7 @@ def pappus_graph(): @_raise_on_directed -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def petersen_graph(create_using=None): """ Returns the Petersen graph. @@ -751,7 +751,7 @@ def petersen_graph(create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def sedgewick_maze_graph(create_using=None): """ Return a small maze with a cycle. @@ -784,7 +784,7 @@ def sedgewick_maze_graph(create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def tetrahedral_graph(create_using=None): """ Returns the 3-regular Platonic Tetrahedral graph. @@ -814,7 +814,7 @@ def tetrahedral_graph(create_using=None): @_raise_on_directed -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def truncated_cube_graph(create_using=None): """ Returns the skeleton of the truncated cube. @@ -872,7 +872,7 @@ def truncated_cube_graph(create_using=None): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def truncated_tetrahedron_graph(create_using=None): """ Returns the skeleton of the truncated Platonic tetrahedron. @@ -903,7 +903,7 @@ def truncated_tetrahedron_graph(create_using=None): @_raise_on_directed -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def tutte_graph(create_using=None): """ Returns the Tutte graph. diff --git a/networkx/generators/social.py b/networkx/generators/social.py index 5d950649c3e7..6f150e2fc685 100644 --- a/networkx/generators/social.py +++ b/networkx/generators/social.py @@ -11,7 +11,7 @@ ] -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def karate_club_graph(): """Returns Zachary's Karate Club graph. 
@@ -93,7 +93,7 @@ def karate_club_graph(): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def davis_southern_women_graph(): """Returns Davis Southern women social network. @@ -244,7 +244,7 @@ def davis_southern_women_graph(): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def florentine_families_graph(): """Returns Florentine families graph. @@ -278,7 +278,7 @@ def florentine_families_graph(): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def les_miserables_graph(): """Returns coappearance network of characters in the novel Les Miserables. diff --git a/networkx/generators/spectral_graph_forge.py b/networkx/generators/spectral_graph_forge.py index cdd1250fa18c..3f4ee3ed9515 100644 --- a/networkx/generators/spectral_graph_forge.py +++ b/networkx/generators/spectral_graph_forge.py @@ -8,7 +8,7 @@ @np_random_state(3) -@nx._dispatchable +@nx._dispatchable(returns_graph=True) def spectral_graph_forge(G, alpha, transformation="identity", seed=None): """Returns a random simple graph with spectrum resembling that of `G` diff --git a/networkx/generators/stochastic.py b/networkx/generators/stochastic.py index 75d9c0b25b30..e3ce97e50d05 100644 --- a/networkx/generators/stochastic.py +++ b/networkx/generators/stochastic.py @@ -11,7 +11,9 @@ @not_implemented_for("undirected") -@nx._dispatchable(edge_attrs="weight") +@nx._dispatchable( + edge_attrs="weight", mutates_input={"not copy": 1}, returns_graph=True +) def stochastic_graph(G, copy=True, weight="weight"): """Returns a right-stochastic representation of directed graph `G`. 
diff --git a/networkx/generators/sudoku.py b/networkx/generators/sudoku.py index c8ac57c48c00..f288ed24d1f1 100644 --- a/networkx/generators/sudoku.py +++ b/networkx/generators/sudoku.py @@ -47,7 +47,7 @@ __all__ = ["sudoku_graph"] -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def sudoku_graph(n=3): """Returns the n-Sudoku graph. The default value of n is 3. diff --git a/networkx/generators/time_series.py b/networkx/generators/time_series.py index ac060627bacc..6cf54b016698 100644 --- a/networkx/generators/time_series.py +++ b/networkx/generators/time_series.py @@ -8,7 +8,7 @@ __all__ = ["visibility_graph"] -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def visibility_graph(series): """ Return a Visibility Graph of an input Time Series. diff --git a/networkx/generators/trees.py b/networkx/generators/trees.py index 60c2fc640e33..c1b0d793526b 100644 --- a/networkx/generators/trees.py +++ b/networkx/generators/trees.py @@ -48,7 +48,7 @@ ] -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def prefix_tree(paths): """Creates a directed prefix tree from a list of paths. @@ -181,7 +181,7 @@ def get_children(parent, paths): return tree -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def prefix_tree_recursive(paths): """Recursively creates a directed prefix tree from a list of paths. @@ -324,7 +324,7 @@ def _helper(paths, root, tree): @py_random_state(1) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_tree(n, seed=None, create_using=None): """Returns a uniformly random tree on `n` nodes. @@ -426,7 +426,7 @@ def random_tree(n, seed=None, create_using=None): @py_random_state("seed") -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_labeled_tree(n, *, seed=None): """Returns a labeled tree on `n` nodes chosen uniformly at random. 
@@ -462,7 +462,7 @@ def random_labeled_tree(n, *, seed=None): @py_random_state("seed") -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_labeled_rooted_tree(n, *, seed=None): """Returns a labeled rooted tree with `n` nodes. @@ -499,7 +499,7 @@ def random_labeled_rooted_tree(n, *, seed=None): @py_random_state("seed") -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_labeled_rooted_forest(n, *, seed=None): """Returns a labeled rooted forest with `n` nodes. @@ -737,7 +737,7 @@ def _random_unlabeled_rooted_tree(n, cache_trees, seed): @py_random_state("seed") -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_unlabeled_rooted_tree(n, *, number_of_trees=None, seed=None): """Returns a number of unlabeled rooted trees uniformly at random @@ -922,7 +922,7 @@ def _random_unlabeled_rooted_forest(n, q, cache_trees, cache_forests, seed): @py_random_state("seed") -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_unlabeled_rooted_forest(n, *, q=None, number_of_forests=None, seed=None): """Returns a forest or list of forests selected at random. @@ -1099,7 +1099,7 @@ def _random_unlabeled_tree(n, cache_trees, cache_forests, seed): @py_random_state("seed") -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_unlabeled_tree(n, *, number_of_trees=None, seed=None): """Returns a tree or list of trees chosen randomly. diff --git a/networkx/generators/triads.py b/networkx/generators/triads.py index f9713b5df33e..5d380588bd35 100644 --- a/networkx/generators/triads.py +++ b/networkx/generators/triads.py @@ -33,7 +33,7 @@ } -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def triad_graph(triad_name): """Returns the triad graph with the given name. 
diff --git a/networkx/readwrite/adjlist.py b/networkx/readwrite/adjlist.py index a14ad430086e..56a1b4d2bf25 100644 --- a/networkx/readwrite/adjlist.py +++ b/networkx/readwrite/adjlist.py @@ -150,7 +150,7 @@ def write_adjlist(G, path, comments="#", delimiter=" ", encoding="utf-8"): path.write(line.encode(encoding)) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def parse_adjlist( lines, comments="#", delimiter=None, create_using=None, nodetype=None ): @@ -224,7 +224,7 @@ def parse_adjlist( @open_file(0, mode="rb") -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def read_adjlist( path, comments="#", diff --git a/networkx/readwrite/edgelist.py b/networkx/readwrite/edgelist.py index fcf6ec230132..ed445c80b423 100644 --- a/networkx/readwrite/edgelist.py +++ b/networkx/readwrite/edgelist.py @@ -173,7 +173,7 @@ def write_edgelist(G, path, comments="#", delimiter=" ", data=True, encoding="ut path.write(line.encode(encoding)) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def parse_edgelist( lines, comments="#", delimiter=None, create_using=None, nodetype=None, data=True ): @@ -298,7 +298,7 @@ def parse_edgelist( @open_file(0, mode="rb") -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def read_edgelist( path, comments="#", @@ -425,7 +425,7 @@ def write_weighted_edgelist(G, path, comments="#", delimiter=" ", encoding="utf- ) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def read_weighted_edgelist( path, comments="#", diff --git a/networkx/readwrite/gexf.py b/networkx/readwrite/gexf.py index de444c5b31a3..16b864377bdd 100644 --- a/networkx/readwrite/gexf.py +++ b/networkx/readwrite/gexf.py @@ -133,7 +133,7 @@ def generate_gexf(G, encoding="utf-8", prettyprint=True, version="1.2draft"): @open_file(0, mode="rb") -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) 
def read_gexf(path, node_type=None, relabel=False, version="1.2draft"): """Read graph in GEXF format from path. diff --git a/networkx/readwrite/gml.py b/networkx/readwrite/gml.py index f76fc204203b..bec99154991f 100644 --- a/networkx/readwrite/gml.py +++ b/networkx/readwrite/gml.py @@ -112,7 +112,7 @@ def literal_destringizer(rep): @open_file(0, mode="rb") -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def read_gml(path, label="label", destringizer=None): """Read graph in GML format from `path`. @@ -195,7 +195,7 @@ def filter_lines(lines): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def parse_gml(lines, label="label", destringizer=None): """Parse GML graph from a string or iterable. diff --git a/networkx/readwrite/graph6.py b/networkx/readwrite/graph6.py index 06578e05ed6e..5e2a30aa7918 100644 --- a/networkx/readwrite/graph6.py +++ b/networkx/readwrite/graph6.py @@ -60,7 +60,7 @@ def _generate_graph6_bytes(G, nodes, header): yield b"\n" -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_graph6_bytes(bytes_in): """Read a simple undirected graph in graph6 format from bytes. @@ -184,7 +184,7 @@ def to_graph6_bytes(G, nodes=None, header=True): @open_file(0, mode="rb") -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def read_graph6(path): """Read simple undirected graphs in graph6 format from path. diff --git a/networkx/readwrite/graphml.py b/networkx/readwrite/graphml.py index 4de6ca1e8821..3a9ab6a451ae 100644 --- a/networkx/readwrite/graphml.py +++ b/networkx/readwrite/graphml.py @@ -233,7 +233,7 @@ def generate_graphml( @open_file(0, mode="rb") -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def read_graphml(path, node_type=str, edge_key_type=int, force_multigraph=False): """Read graph in GraphML format from path. 
@@ -306,7 +306,7 @@ def read_graphml(path, node_type=str, edge_key_type=int, force_multigraph=False) return glist[0] -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def parse_graphml( graphml_string, node_type=str, edge_key_type=int, force_multigraph=False ): diff --git a/networkx/readwrite/json_graph/adjacency.py b/networkx/readwrite/json_graph/adjacency.py index 8fb24ea031b9..3b05747565e7 100644 --- a/networkx/readwrite/json_graph/adjacency.py +++ b/networkx/readwrite/json_graph/adjacency.py @@ -81,7 +81,7 @@ def adjacency_data(G, attrs=_attrs): return data -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def adjacency_graph(data, directed=False, multigraph=True, attrs=_attrs): """Returns graph from adjacency data format. diff --git a/networkx/readwrite/json_graph/cytoscape.py b/networkx/readwrite/json_graph/cytoscape.py index 8c45a020731a..2f3b2176ab40 100644 --- a/networkx/readwrite/json_graph/cytoscape.py +++ b/networkx/readwrite/json_graph/cytoscape.py @@ -80,7 +80,7 @@ def cytoscape_data(G, name="name", ident="id"): return jsondata -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def cytoscape_graph(data, name="name", ident="id"): """ Create a NetworkX graph from a dictionary in cytoscape JSON format. 
diff --git a/networkx/readwrite/json_graph/node_link.py b/networkx/readwrite/json_graph/node_link.py index e5136bad0956..e29100f448ae 100644 --- a/networkx/readwrite/json_graph/node_link.py +++ b/networkx/readwrite/json_graph/node_link.py @@ -132,7 +132,7 @@ def node_link_data( return data -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def node_link_graph( data, directed=False, diff --git a/networkx/readwrite/json_graph/tree.py b/networkx/readwrite/json_graph/tree.py index bd8679fcab10..22b07b09d277 100644 --- a/networkx/readwrite/json_graph/tree.py +++ b/networkx/readwrite/json_graph/tree.py @@ -83,7 +83,7 @@ def add_children(n, G): return {**G.nodes[root], ident: root, children: add_children(root, G)} -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def tree_graph(data, ident="id", children="children"): """Returns graph from tree data format. diff --git a/networkx/readwrite/leda.py b/networkx/readwrite/leda.py index d07162b65f32..9fb57db14008 100644 --- a/networkx/readwrite/leda.py +++ b/networkx/readwrite/leda.py @@ -19,7 +19,7 @@ @open_file(0, mode="rb") -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def read_leda(path, encoding="UTF-8"): """Read graph in LEDA format from path. @@ -46,7 +46,7 @@ def read_leda(path, encoding="UTF-8"): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def parse_leda(lines): """Read graph in LEDA format from string or iterable. 
diff --git a/networkx/readwrite/multiline_adjlist.py b/networkx/readwrite/multiline_adjlist.py index fe542e6a1fd4..526b30a9b055 100644 --- a/networkx/readwrite/multiline_adjlist.py +++ b/networkx/readwrite/multiline_adjlist.py @@ -191,7 +191,7 @@ def write_multiline_adjlist(G, path, delimiter=" ", comments="#", encoding="utf- path.write(multiline.encode(encoding)) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def parse_multiline_adjlist( lines, comments="#", delimiter=None, create_using=None, nodetype=None, edgetype=None ): @@ -301,7 +301,7 @@ def parse_multiline_adjlist( @open_file(0, mode="rb") -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def read_multiline_adjlist( path, comments="#", diff --git a/networkx/readwrite/p2g.py b/networkx/readwrite/p2g.py index 2afba8c6f4de..85f07ec84152 100644 --- a/networkx/readwrite/p2g.py +++ b/networkx/readwrite/p2g.py @@ -57,7 +57,7 @@ def write_p2g(G, path, encoding="utf-8"): @open_file(0, mode="r") -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def read_p2g(path, encoding="utf-8"): """Read graph in p2g format from path. @@ -75,7 +75,7 @@ def read_p2g(path, encoding="utf-8"): return G -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def parse_p2g(lines): """Parse p2g format graph from string or iterable. diff --git a/networkx/readwrite/pajek.py b/networkx/readwrite/pajek.py index fef19408001c..f148f16208de 100644 --- a/networkx/readwrite/pajek.py +++ b/networkx/readwrite/pajek.py @@ -130,7 +130,7 @@ def write_pajek(G, path, encoding="UTF-8"): @open_file(0, mode="rb") -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def read_pajek(path, encoding="UTF-8"): """Read graph in Pajek format from path. 
@@ -163,7 +163,7 @@ def read_pajek(path, encoding="UTF-8"): return parse_pajek(lines) -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def parse_pajek(lines): """Parse Pajek format graph from string or iterable. diff --git a/networkx/readwrite/sparse6.py b/networkx/readwrite/sparse6.py index 04fe9e6edc6f..a70599438959 100644 --- a/networkx/readwrite/sparse6.py +++ b/networkx/readwrite/sparse6.py @@ -101,7 +101,7 @@ def enc(x): yield b"\n" -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_sparse6_bytes(string): """Read an undirected graph in sparse6 format from string. @@ -250,7 +250,7 @@ def to_sparse6_bytes(G, nodes=None, header=True): @open_file(0, mode="rb") -@nx._dispatchable(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def read_sparse6(path): """Read an undirected graph in sparse6 format from path. diff --git a/networkx/relabel.py b/networkx/relabel.py index 6e5f805d9ba6..4b870f726ef4 100644 --- a/networkx/relabel.py +++ b/networkx/relabel.py @@ -3,7 +3,9 @@ __all__ = ["convert_node_labels_to_integers", "relabel_nodes"] -@nx._dispatchable(preserve_all_attrs=True) +@nx._dispatchable( + preserve_all_attrs=True, mutates_input={"not copy": 2}, returns_graph=True +) def relabel_nodes(G, mapping, copy=True): """Relabel the nodes of the graph G according to a given mapping. 
@@ -221,9 +223,7 @@ def _relabel_copy(G, mapping): return H -@nx._dispatchable( - preserve_edge_attrs=True, preserve_node_attrs=True, preserve_graph_attrs=True -) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def convert_node_labels_to_integers( G, first_label=0, ordering="default", label_attribute=None ): diff --git a/networkx/utils/backends.py b/networkx/utils/backends.py index 8dfb82496de3..8a0e127caa87 100644 --- a/networkx/utils/backends.py +++ b/networkx/utils/backends.py @@ -179,6 +179,8 @@ def __new__( preserve_node_attrs=False, preserve_graph_attrs=False, preserve_all_attrs=False, + mutates_input=False, + returns_graph=False, ): """Dispatches to a backend algorithm based on input graph types. @@ -236,6 +238,18 @@ def __new__( Whether to preserve all edge, node and graph attributes. This overrides all the other preserve_*_attrs. + mutates_input : bool or dict, default False + For bool, whether the functions mutates an input graph argument. + For dict of ``{arg_name: arg_pos}``, arguments that indicates whether an + input graph will be mutated, and ``arg_name`` may begin with ``"not "`` + to negate the logic (for example, this is used by ``copy=`` arguments). + By default, dispatching doesn't convert input graphs to a different + backend for functions that mutate input graphs. + + returns_graph : bool, default False + Whether the function can return or yield a graph object. By default, + dispatching doesn't convert input graphs to a different backend for + functions that return graphs. 
""" if func is None: return partial( @@ -248,6 +262,8 @@ def __new__( preserve_node_attrs=preserve_node_attrs, preserve_graph_attrs=preserve_graph_attrs, preserve_all_attrs=preserve_all_attrs, + mutates_input=mutates_input, + returns_graph=returns_graph, ) if isinstance(func, str): raise TypeError("'name' and 'graphs' must be passed by keyword") from None @@ -283,6 +299,9 @@ def __new__( self.preserve_edge_attrs = preserve_edge_attrs or preserve_all_attrs self.preserve_node_attrs = preserve_node_attrs or preserve_all_attrs self.preserve_graph_attrs = preserve_graph_attrs or preserve_all_attrs + self.mutates_input = mutates_input + # Keep `returns_graph` private for now, b/c we may extend info on return types + self._returns_graph = returns_graph if edge_attrs is not None and not isinstance(edge_attrs, str | dict): raise TypeError( @@ -307,6 +326,16 @@ def __new__( f"Bad type for preserve_graph_attrs: {type(self.preserve_graph_attrs)}." " Expected bool or set." ) from None + if not isinstance(self.mutates_input, bool | dict): + raise TypeError( + f"Bad type for mutates_input: {type(self.mutates_input)}." + " Expected bool or dict." + ) from None + if not isinstance(self._returns_graph, bool): + raise TypeError( + f"Bad type for returns_graph: {type(self._returns_graph)}." + " Expected bool." + ) from None if isinstance(graphs, str): graphs = {graphs: 0} @@ -445,15 +474,6 @@ def __call__(self, /, *args, backend=None, **kwargs): # if (val := args[pos] if pos < len(args) else kwargs.get(gname)) is not None # } - if self._is_testing and self._automatic_backends and backend_name is None: - # Special path if we are running networkx tests with a backend. 
- return self._convert_and_call_for_tests( - self._automatic_backends[0], - args, - kwargs, - fallback_to_nx=self._fallback_to_nx, - ) - # Check if any graph comes from a backend if self.list_graphs: # Make sure we don't lose values by consuming an iterator @@ -492,6 +512,17 @@ def __call__(self, /, *args, backend=None, **kwargs): getattr(g, "__networkx_backend__", "networkx") for g in graphs_resolved.values() } + + if self._is_testing and self._automatic_backends and backend_name is None: + # Special path if we are running networkx tests with a backend. + # This even runs for (and handles) functions that mutate input graphs. + return self._convert_and_call_for_tests( + self._automatic_backends[0], + args, + kwargs, + fallback_to_nx=self._fallback_to_nx, + ) + if has_backends: # Dispatchable graphs found! Dispatch to backend function. # We don't handle calls with different backend graphs yet, @@ -524,7 +555,8 @@ def __call__(self, /, *args, backend=None, **kwargs): backend = _load_backend(graph_backend_name) if hasattr(backend, self.name): if "networkx" in graph_backend_names: - # We need to convert networkx graphs to backend graphs + # We need to convert networkx graphs to backend graphs. + # There is currently no need to check `self.mutates_input` here. return self._convert_and_call( graph_backend_name, args, @@ -545,8 +577,31 @@ def __call__(self, /, *args, backend=None, **kwargs): ) # Only networkx graphs; try to convert and run with a backend with automatic - # conversion, but don't do this by default for graph generators or loaders. - if self.graphs: + # conversion, but don't do this by default for graph generators or loaders, + # or if the functions mutates an input graph or returns a graph. + if ( + not self._returns_graph + and ( + not self.mutates_input + or isinstance(self.mutates_input, dict) + # If `mutates_input` begins with "not ", then assume the argument is boolean, + # otherwise treat it as a node or edge attribute if it's not None. 
+ and any( + not ( + args[arg_pos] + if len(args) > arg_pos + else kwargs.get(arg_name[4:], True) + ) + if arg_name.startswith("not ") + else ( + args[arg_pos] if len(args) > arg_pos else kwargs.get(arg_name) + ) + is not None + for arg_name, arg_pos in self.mutates_input.items() + ) + ) + ): + # Should we warn or log if we don't convert b/c the input will be mutated? for backend_name in self._automatic_backends: if self._can_backend_run(backend_name, *args, **kwargs): return self._convert_and_call( @@ -861,6 +916,40 @@ def _convert_and_call_for_tests( pytest.xfail( exc.args[0] if exc.args else f"{self.name} raised {type(exc).__name__}" ) + # Verify that `self._returns_graph` is correct. This compares the return type + # to the type expected from `self._returns_graph`. This handles tuple and list + # return types, but *does not* catch functions that yield graphs. + if ( + self._returns_graph + != ( + isinstance(result, nx.Graph) + or hasattr(result, "__networkx_backend__") + or isinstance(result, tuple | list) + and any( + isinstance(x, nx.Graph) or hasattr(x, "__networkx_backend__") + for x in result + ) + ) + and not ( + # May return Graph or None + self.name in {"check_planarity", "check_planarity_recursive"} + and any(x is None for x in result) + ) + and not ( + # May return Graph or dict + self.name in {"held_karp_ascent"} + and any(isinstance(x, dict) for x in result) + ) + and self.name + not in { + # yields graphs + "all_triads", + "general_k_edge_subgraphs", + # yields graphs or arrays + "nonisomorphic_trees", + } + ): + raise RuntimeError(f"`returns_graph` is incorrect for {self.name}") if self.name in { "edmonds_karp_core", From 611cc9a250515f9a0d7e88adf25d46e3fffed35d Mon Sep 17 00:00:00 2001 From: Ross Barnowski Date: Wed, 28 Feb 2024 06:55:37 -0800 Subject: [PATCH 08/47] Deprecate the `create` argument of `nonisomorphic_trees` (#7316) * Add deprecation warning and docstring warning. * Add deprecated_call to tests. * Add warnings filter to test conf. 
* Add note to deprecation docs. --- doc/developer/deprecations.rst | 2 ++ networkx/conftest.py | 3 +++ networkx/generators/nonisomorphic_trees.py | 23 +++++++++++++++++++ .../tests/test_nonisomorphic_trees.py | 17 ++++++++------ 4 files changed, 38 insertions(+), 7 deletions(-) diff --git a/doc/developer/deprecations.rst b/doc/developer/deprecations.rst index fffe9ecef2f1..099d47cd4198 100644 --- a/doc/developer/deprecations.rst +++ b/doc/developer/deprecations.rst @@ -70,3 +70,5 @@ Version 3.5 to return a dict. See #6527 * Change ``shortest_path`` in ``algorithms/shortest_path/generic.py`` to return a iterator. See #6527 +* Remove ``create`` keyword argument from ``nonisomorphic_trees`` in + ``generators/nonisomorphic_trees``. diff --git a/networkx/conftest.py b/networkx/conftest.py index 1f0455175751..20894e2aded7 100644 --- a/networkx/conftest.py +++ b/networkx/conftest.py @@ -148,6 +148,9 @@ def set_warnings(): warnings.filterwarnings( "ignore", category=DeprecationWarning, message="\n\nk_corona" ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message=r"\n\nThe 'create=matrix'" + ) @pytest.fixture(autouse=True) diff --git a/networkx/generators/nonisomorphic_trees.py b/networkx/generators/nonisomorphic_trees.py index 9d403cb848e1..9716cf33834a 100644 --- a/networkx/generators/nonisomorphic_trees.py +++ b/networkx/generators/nonisomorphic_trees.py @@ -25,6 +25,15 @@ def nonisomorphic_trees(order, create="graph"): If ``"graph"`` is selected a list of ``Graph`` instances will be returned, if matrix is selected a list of adjacency matrices will be returned. + .. deprecated:: 3.3 + + The `create` argument is deprecated and will be removed in NetworkX + version 3.5. In the future, `nonisomorphic_trees` will yield graph + instances by default. 
To generate adjacency matrices, call + ``nx.to_numpy_array`` on the output, e.g.:: + + [nx.to_numpy_array(G) for G in nx.nonisomorphic_trees(N)] + Yields ------ list @@ -45,6 +54,20 @@ def nonisomorphic_trees(order, create="graph"): if create == "graph": yield _layout_to_graph(layout) elif create == "matrix": + import warnings + + warnings.warn( + ( + "\n\nThe 'create=matrix' argument of nonisomorphic_trees\n" + "is deprecated and will be removed in version 3.5.\n" + "Use ``nx.to_numpy_array`` to convert graphs to adjacency " + "matrices, e.g.::\n\n" + " [nx.to_numpy_array(G) for G in nx.nonisomorphic_trees(N)]" + ), + category=DeprecationWarning, + stacklevel=2, + ) + yield _layout_to_matrix(layout) layout = _next_rooted_tree(layout) diff --git a/networkx/generators/tests/test_nonisomorphic_trees.py b/networkx/generators/tests/test_nonisomorphic_trees.py index ced1cbe15c9f..f654eac88413 100644 --- a/networkx/generators/tests/test_nonisomorphic_trees.py +++ b/networkx/generators/tests/test_nonisomorphic_trees.py @@ -1,10 +1,8 @@ """ -==================== -Generators - Non Isomorphic Trees -==================== - Unit tests for WROM algorithm generator in generators/nonisomorphic_trees.py """ +import pytest + import networkx as nx from networkx.utils import edges_equal @@ -54,11 +52,16 @@ def f(x): def test_nonisomorphic_trees_matrix(self): trees_2 = [[[0, 1], [1, 0]]] - assert list(nx.nonisomorphic_trees(2, create="matrix")) == trees_2 + with pytest.deprecated_call(): + assert list(nx.nonisomorphic_trees(2, create="matrix")) == trees_2 + trees_3 = [[[0, 1, 1], [1, 0, 0], [1, 0, 0]]] - assert list(nx.nonisomorphic_trees(3, create="matrix")) == trees_3 + with pytest.deprecated_call(): + assert list(nx.nonisomorphic_trees(3, create="matrix")) == trees_3 + trees_4 = [ [[0, 1, 0, 1], [1, 0, 1, 0], [0, 1, 0, 0], [1, 0, 0, 0]], [[0, 1, 1, 1], [1, 0, 0, 0], [1, 0, 0, 0], [1, 0, 0, 0]], ] - assert list(nx.nonisomorphic_trees(4, create="matrix")) == trees_4 + with 
pytest.deprecated_call(): + assert list(nx.nonisomorphic_trees(4, create="matrix")) == trees_4 From f5ada3f9df4273e7fca8e6b393a61afe8d6507a4 Mon Sep 17 00:00:00 2001 From: Mauricio Souza de Alencar <856825+mdealencar@users.noreply.github.com> Date: Wed, 28 Feb 2024 15:59:34 +0100 Subject: [PATCH 09/47] fix: make `PlanarEmbedding.copy()` use `add_edges_from()` from parent (closes #7223) (#7224) fix: make `PlanarEmbedding.copy()` use `add_edges_from()` from parent class (planarity.py) --- networkx/algorithms/planarity.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/networkx/algorithms/planarity.py b/networkx/algorithms/planarity.py index cfbb36a5c5ec..b8dcda60c837 100644 --- a/networkx/algorithms/planarity.py +++ b/networkx/algorithms/planarity.py @@ -1385,3 +1385,16 @@ def is_directed(self): contained. """ return False + + def copy(self, as_view=False): + if as_view is True: + return nx.graphviews.generic_graph_view(self) + G = self.__class__() + G.graph.update(self.graph) + G.add_nodes_from((n, d.copy()) for n, d in self._node.items()) + super(self.__class__, G).add_edges_from( + (u, v, datadict.copy()) + for u, nbrs in self._adj.items() + for v, datadict in nbrs.items() + ) + return G From 26cbde337f65301faa03037eea2825cce4a43414 Mon Sep 17 00:00:00 2001 From: Erik Welch Date: Wed, 28 Feb 2024 13:57:13 -0600 Subject: [PATCH 10/47] Add explicit targets of missing modules for intersphinx (#7313) --- doc/reference/algorithms/assortativity.rst | 8 +++++ doc/reference/algorithms/centrality.rst | 34 ++++++++++++++++++++++ doc/reference/algorithms/coloring.rst | 3 ++ doc/reference/algorithms/community.rst | 2 +- doc/reference/algorithms/component.rst | 12 ++++++++ doc/reference/algorithms/flow.rst | 11 +++++++ doc/reference/algorithms/isomorphism.rst | 1 + doc/reference/algorithms/minors.rst | 2 ++ doc/reference/readwrite/json_graph.rst | 5 ++++ 9 files changed, 77 insertions(+), 1 deletion(-) diff --git a/doc/reference/algorithms/assortativity.rst 
b/doc/reference/algorithms/assortativity.rst index 8ec61677661b..a6afe231e6ab 100644 --- a/doc/reference/algorithms/assortativity.rst +++ b/doc/reference/algorithms/assortativity.rst @@ -6,6 +6,8 @@ Assortativity .. autosummary:: :toctree: generated/ +.. _networkx.algorithms.assortativity.correlation: + Assortativity ------------- .. autosummary:: @@ -16,6 +18,8 @@ Assortativity numeric_assortativity_coefficient degree_pearson_correlation_coefficient +.. _networkx.algorithms.assortativity.neighbor_degree: + Average neighbor degree ----------------------- .. autosummary:: @@ -23,6 +27,7 @@ Average neighbor degree average_neighbor_degree +.. _networkx.algorithms.assortativity.connectivity: Average degree connectivity --------------------------- @@ -31,6 +36,7 @@ Average degree connectivity average_degree_connectivity +.. _networkx.algorithms.assortativity.mixing: Mixing ------ @@ -43,6 +49,8 @@ Mixing degree_mixing_dict mixing_dict +.. _networkx.algorithms.assortativity.pairs: + Pairs ----- .. autosummary:: diff --git a/doc/reference/algorithms/centrality.rst b/doc/reference/algorithms/centrality.rst index b7d4001746f0..b0ea8437530c 100644 --- a/doc/reference/algorithms/centrality.rst +++ b/doc/reference/algorithms/centrality.rst @@ -4,6 +4,8 @@ Centrality .. automodule:: networkx.algorithms.centrality +.. _networkx.algorithms.centrality.degree_alg: + Degree ------ .. autosummary:: @@ -13,6 +15,9 @@ Degree in_degree_centrality out_degree_centrality +.. _networkx.algorithms.centrality.eigenvector: +.. _networkx.algorithms.centrality.katz: + Eigenvector ----------- .. autosummary:: @@ -23,6 +28,8 @@ Eigenvector katz_centrality katz_centrality_numpy +.. _networkx.algorithms.centrality.closeness: + Closeness --------- .. autosummary:: @@ -31,6 +38,8 @@ Closeness closeness_centrality incremental_closeness_centrality +.. _networkx.algorithms.centrality.current_flow_closeness: + Current Flow Closeness ---------------------- .. 
autosummary:: @@ -39,6 +48,9 @@ Current Flow Closeness current_flow_closeness_centrality information_centrality +.. _networkx.algorithms.centrality.betweenness: +.. _networkx.algorithms.centrality.betweenness_subset: + (Shortest Path) Betweenness --------------------------- .. autosummary:: @@ -49,6 +61,8 @@ Current Flow Closeness edge_betweenness_centrality edge_betweenness_centrality_subset +.. _networkx.algorithms.centrality.current_flow_betweenness: +.. _networkx.algorithms.centrality.current_flow_betweenness_subset: Current Flow Betweenness ------------------------ @@ -68,6 +82,8 @@ Communicability Betweenness communicability_betweenness_centrality +.. _networkx.algorithms.centrality.group: + Group Centrality ---------------- .. autosummary:: @@ -80,6 +96,8 @@ Group Centrality group_out_degree_centrality prominent_group +.. _networkx.algorithms.centrality.load: + Load ---- .. autosummary:: @@ -88,6 +106,8 @@ Load load_centrality edge_load_centrality +.. _networkx.algorithms.centrality.subgraph_alg: + Subgraph -------- .. autosummary:: @@ -97,6 +117,8 @@ Subgraph subgraph_centrality_exp estrada_index +.. _networkx.algorithms.centrality.harmonic: + Harmonic Centrality ------------------- .. autosummary:: @@ -111,6 +133,8 @@ Dispersion dispersion +.. _networkx.algorithms.centrality.reaching: + Reaching -------- .. autosummary:: @@ -119,6 +143,8 @@ Reaching local_reaching_centrality global_reaching_centrality +.. _networkx.algorithms.centrality.percolation: + Percolation ----------- .. autosummary:: @@ -126,6 +152,8 @@ Percolation percolation_centrality +.. _networkx.algorithms.centrality.second_order: + Second Order Centrality ----------------------- .. autosummary:: @@ -133,6 +161,8 @@ Second Order Centrality second_order_centrality +.. _networkx.algorithms.centrality.trophic: + Trophic ------- .. autosummary:: @@ -142,6 +172,8 @@ Trophic trophic_differences trophic_incoherence_parameter +.. _networkx.algorithms.centrality.voterank_alg: + VoteRank -------- .. 
autosummary:: @@ -149,6 +181,8 @@ VoteRank voterank +.. _networkx.algorithms.centrality.laplacian: + Laplacian --------- .. autosummary:: diff --git a/doc/reference/algorithms/coloring.rst b/doc/reference/algorithms/coloring.rst index a0e68ce769f1..5c21770d5281 100644 --- a/doc/reference/algorithms/coloring.rst +++ b/doc/reference/algorithms/coloring.rst @@ -1,3 +1,6 @@ +.. _networkx.algorithms.coloring.greedy_coloring: +.. _networkx.algorithms.coloring.equitable_coloring: + ******** Coloring ******** diff --git a/doc/reference/algorithms/community.rst b/doc/reference/algorithms/community.rst index b031bcb349b9..0c62bf5bb133 100644 --- a/doc/reference/algorithms/community.rst +++ b/doc/reference/algorithms/community.rst @@ -15,7 +15,7 @@ Bipartitions kernighan_lin_bisection Divisive Communities ---------------------- +-------------------- .. automodule:: networkx.algorithms.community.divisive .. autosummary:: :toctree: generated/ diff --git a/doc/reference/algorithms/component.rst b/doc/reference/algorithms/component.rst index 23aa4c5bcf26..a0b25d1de6e4 100644 --- a/doc/reference/algorithms/component.rst +++ b/doc/reference/algorithms/component.rst @@ -3,6 +3,8 @@ Components ********** .. automodule:: networkx.algorithms.components +.. _networkx.algorithms.components.connected: + Connectivity ------------ .. autosummary:: @@ -13,6 +15,8 @@ Connectivity connected_components node_connected_component +.. _networkx.algorithms.components.strongly_connected: + Strong connectivity ------------------- .. autosummary:: @@ -25,6 +29,8 @@ Strong connectivity kosaraju_strongly_connected_components condensation +.. _networkx.algorithms.components.weakly_connected: + Weak connectivity ----------------- .. autosummary:: @@ -34,6 +40,8 @@ Weak connectivity number_weakly_connected_components weakly_connected_components +.. _networkx.algorithms.components.attracting: + Attracting components --------------------- .. 
autosummary:: @@ -43,6 +51,8 @@ Attracting components number_attracting_components attracting_components +.. _networkx.algorithms.components.biconnected: + Biconnected components ---------------------- .. autosummary:: @@ -53,6 +63,8 @@ Biconnected components biconnected_component_edges articulation_points +.. _networkx.algorithms.components.semiconnected: + Semiconnectedness ----------------- .. autosummary:: diff --git a/doc/reference/algorithms/flow.rst b/doc/reference/algorithms/flow.rst index 0888f052c7e0..8fe2936fee73 100644 --- a/doc/reference/algorithms/flow.rst +++ b/doc/reference/algorithms/flow.rst @@ -4,6 +4,7 @@ Flows .. automodule:: networkx.algorithms.flow +.. _networkx.algorithms.flow.maxflow: Maximum Flow ------------ @@ -15,6 +16,7 @@ Maximum Flow minimum_cut minimum_cut_value +.. _networkx.algorithms.flow.edmondskarp: Edmonds-Karp ------------ @@ -23,6 +25,7 @@ Edmonds-Karp edmonds_karp +.. _networkx.algorithms.flow.shortestaugmentingpath: Shortest Augmenting Path ------------------------ @@ -31,6 +34,7 @@ Shortest Augmenting Path shortest_augmenting_path +.. _networkx.algorithms.flow.preflowpush: Preflow-Push ------------ @@ -39,6 +43,7 @@ Preflow-Push preflow_push +.. _networkx.algorithms.flow.dinitz_alg: Dinitz ------ @@ -47,6 +52,7 @@ Dinitz dinitz +.. _networkx.algorithms.flow.boykovkolmogorov: Boykov-Kolmogorov ----------------- @@ -55,6 +61,7 @@ Boykov-Kolmogorov boykov_kolmogorov +.. _networkx.algorithms.flow.gomory_hu: Gomory-Hu Tree -------------- @@ -63,6 +70,7 @@ Gomory-Hu Tree gomory_hu_tree +.. _networkx.algorithms.flow.utils: Utils ----- @@ -71,6 +79,8 @@ Utils build_residual_network +.. _networkx.algorithms.flow.mincost: +.. _networkx.algorithms.flow.networksimplex: Network Simplex --------------- @@ -83,6 +93,7 @@ Network Simplex cost_of_flow max_flow_min_cost +.. 
_networkx.algorithms.flow.capacityscaling: Capacity Scaling Minimum Cost Flow ---------------------------------- diff --git a/doc/reference/algorithms/isomorphism.rst b/doc/reference/algorithms/isomorphism.rst index 1d64bd0e7f80..2010831c514e 100644 --- a/doc/reference/algorithms/isomorphism.rst +++ b/doc/reference/algorithms/isomorphism.rst @@ -1,4 +1,5 @@ .. _isomorphism: +.. _networkx.algorithms.isomorphism.isomorph: *********** Isomorphism diff --git a/doc/reference/algorithms/minors.rst b/doc/reference/algorithms/minors.rst index 77cf7385e968..cfe6677d3b5d 100644 --- a/doc/reference/algorithms/minors.rst +++ b/doc/reference/algorithms/minors.rst @@ -1,3 +1,5 @@ +.. _networkx.algorithms.minors.contraction: + ****** Minors ****** diff --git a/doc/reference/readwrite/json_graph.rst b/doc/reference/readwrite/json_graph.rst index 53368770ed45..2fed08049488 100644 --- a/doc/reference/readwrite/json_graph.rst +++ b/doc/reference/readwrite/json_graph.rst @@ -1,3 +1,8 @@ +.. _networkx.readwrite.json_graph.adjacency: +.. _networkx.readwrite.json_graph.cytoscape: +.. _networkx.readwrite.json_graph.node_link: +.. _networkx.readwrite.json_graph.tree: + JSON ==== .. 
automodule:: networkx.readwrite.json_graph From 051ffc1f75d63ff685141e8afc131a0e03325091 Mon Sep 17 00:00:00 2001 From: Dan Schult Date: Wed, 28 Feb 2024 15:03:49 -0500 Subject: [PATCH 11/47] Allow seed of np.random instance to exactly produce arbitrarily large integers (#6869) * Allow numpy.random to produce arbitrary integers - Introduce PythonRandomViaNumpyBits to replace PythonRandomInterface Follows Robert Kern suggestion in Numpy Issue #244858 - set up to use old interface for possible Legacy users (RandomState that isn't the default RandomState) All others users dont have claim to maintain the stream - set up other input to provide new interface - rewrite old interface to not raise when high limit is > int64 value - Update decorator doc_strings to reflect numpy.random.Generators - shift to `random()` from `random_sample()` - correct the broken tests in test_decorators (assert a,b => assert a==b - add decorator tests for numpy Generator class - add smoke test of requesting very large int even with old interface - update tests of create_py_random_state to reflect changes * handle randrange default corner case --- networkx/utils/decorators.py | 30 +++--- networkx/utils/misc.py | 127 ++++++++++++++++++++---- networkx/utils/tests/test_decorators.py | 59 +++++++---- networkx/utils/tests/test_misc.py | 26 +++-- 4 files changed, 185 insertions(+), 57 deletions(-) diff --git a/networkx/utils/decorators.py b/networkx/utils/decorators.py index ea37a506e199..205bf5005431 100644 --- a/networkx/utils/decorators.py +++ b/networkx/utils/decorators.py @@ -267,14 +267,15 @@ def _nodes_or_number(n): def np_random_state(random_state_argument): - """Decorator to generate a `numpy.random.RandomState` instance. + """Decorator to generate a numpy RandomState or Generator instance. The decorator processes the argument indicated by `random_state_argument` using :func:`nx.utils.create_random_state`. 
The argument value can be a seed (integer), or a `numpy.random.RandomState` - instance or (`None` or `numpy.random`). The latter options use the glocal - random number generator used by `numpy.random`. - The result is a `numpy.random.RandomState` instance. + or `numpy.random.RandomState` instance or (`None` or `numpy.random`). + The latter two options use the global random number generator for `numpy.random`. + + The returned instance is a `numpy.random.RandomState` or `numpy.random.Generator`. Parameters ---------- @@ -315,19 +316,24 @@ def random_array(dims, random_state=1): def py_random_state(random_state_argument): """Decorator to generate a random.Random instance (or equiv). - The decorator processes the argument indicated by `random_state_argument` - using :func:`nx.utils.create_py_random_state`. - The argument value can be a seed (integer), or a random number generator:: + This decorator processes `random_state_argument` using + :func:`nx.utils.create_py_random_state`. + The input value can be a seed (integer), or a random number generator:: If int, return a random.Random instance set with seed=int. If random.Random instance, return it. If None or the `random` package, return the global random number generator used by `random`. - If np.random package, return the global numpy random number - generator wrapped in a PythonRandomInterface class. - If np.random.RandomState instance, return it wrapped in - PythonRandomInterface - If a PythonRandomInterface instance, return it + If np.random package, or the default numpy RandomState instance, + return the default numpy random number generator wrapped in a + `PythonRandomViaNumpyBits` class. + If np.random.Generator instance, return it wrapped in a + `PythonRandomViaNumpyBits` class. + + # Legacy options + If np.random.RandomState instance, return it wrapped in a + `PythonRandomInterface` class. 
+ If a `PythonRandomInterface` instance, return it Parameters ---------- diff --git a/networkx/utils/misc.py b/networkx/utils/misc.py index 54b9b17dcd60..6cc8707bd108 100644 --- a/networkx/utils/misc.py +++ b/networkx/utils/misc.py @@ -11,6 +11,7 @@ 1 """ +import random import sys import uuid import warnings @@ -30,6 +31,7 @@ "create_random_state", "create_py_random_state", "PythonRandomInterface", + "PythonRandomViaNumpyBits", "nodes_equal", "edges_equal", "graphs_equal", @@ -271,7 +273,68 @@ def create_random_state(random_state=None): raise ValueError(msg) +class PythonRandomViaNumpyBits(random.Random): + """Provide the random.random algorithms using a Numpy.random bit generator + + The intent is to allow people to contribute code that uses Python's random + library, but still allow users to provide a single easily controlled random + bit-stream for all work with NetworkX. This implementation is based on helpful + comments and code from Robert Kern on NumPy's GitHub Issue #24458. + + This implementation supercedes that of `PythonRandomInterface` which rewrote + methods to account for subtle differences in API between `random` and + `numpy.random`. Instead this subclasses `random.Random` and overwrites + the methods `random`, `getrandbits`, `getstate`, `setstate` and `seed`. + It makes them use the rng values from an input numpy `RandomState` or `Generator`. + Those few methods allow the rest of the `random.Random` methods to provide + the API interface of `random.random` whlie using randomness generated by + a numpy generator. + """ + + def __init__(self, rng=None): + try: + import numpy as np + except ImportError: + msg = "numpy not found, only random.random available." + warnings.warn(msg, ImportWarning) + + if rng is None: + self._rng = np.random.mtrand._rand + else: + self._rng = rng + + # Not necessary, given our overriding of gauss() below, but it's + # in the superclass and nominally public, so initialize it here. 
+ self.gauss_next = None + + def random(self): + """Get the next random number in the range 0.0 <= X < 1.0.""" + return self._rng.random() + + def getrandbits(self, k): + """getrandbits(k) -> x. Generates an int with k random bits.""" + if k < 0: + raise ValueError("number of bits must be non-negative") + numbytes = (k + 7) // 8 # bits / 8 and rounded up + x = int.from_bytes(self._rng.bytes(numbytes), "big") + return x >> (numbytes * 8 - k) # trim excess bits + + def getstate(self): + return self._rng.__getstate__() + + def setstate(self, state): + self._rng.__setstate__(state) + + def seed(self, *args, **kwds): + "Do nothing override method." + + +################################################################## class PythonRandomInterface: + """PythonRandomInterface is included for backward compatibility + New code should use PythonRandomViaNumpyBits instead. + """ + def __init__(self, rng=None): try: import numpy as np @@ -293,6 +356,12 @@ def uniform(self, a, b): def randrange(self, a, b=None): import numpy as np + if b is None: + a, b = 0, a + if b > 9223372036854775807: # from np.iinfo(np.int64).max + tmp_rng = PythonRandomViaNumpyBits(self._rng) + return tmp_rng.randrange(a, b) + if isinstance(self._rng, np.random.Generator): return self._rng.integers(a, b) return self._rng.randint(a, b) @@ -323,6 +392,10 @@ def sample(self, seq, k): def randint(self, a, b): import numpy as np + if b > 9223372036854775807: # from np.iinfo(np.int64).max + tmp_rng = PythonRandomViaNumpyBits(self._rng) + return tmp_rng.randint(a, b) + if isinstance(self._rng, np.random.Generator): return self._rng.integers(a, b + 1) return self._rng.randint(a, b + 1) @@ -357,32 +430,50 @@ def create_py_random_state(random_state=None): if random.Random instance, return it. if None or the `random` package, return the global random number generator used by `random`. - if np.random package, return the global numpy random number - generator wrapped in a PythonRandomInterface class. 
- if np.random.RandomState or np.random.Generator instance, return it - wrapped in PythonRandomInterface + if an np.random.Generator instance, or the np.random package, or + the global numpy random number generator, then return it + wrapped in a PythonRandomViaNumpyBits class. + if a PythonRandomViaNumpyBits instance, return it + + # Provided for backward bit-stream matching with legacy code + if a np.randomRandomState instance and not the global numpy default, + return it wrapped in PythonRandomInterface if a PythonRandomInterface instance, return it - """ - import random - - try: - import numpy as np - - if random_state is np.random: - return PythonRandomInterface(np.random.mtrand._rand) - if isinstance(random_state, np.random.RandomState | np.random.Generator): - return PythonRandomInterface(random_state) - if isinstance(random_state, PythonRandomInterface): - return random_state - except ImportError: - pass + Note: Conversion from older PythonRandomInterface to PythonRandomViaNumpyBits + is handled here to allow users of Legacy `numpy.random.RandomState` to exactly + match the legacy values produced. We assume that if a user cares about legacy + values, they are using a np.RandomState instance that is not the numpy default. + The default instance has state reset for each Python session. The Generator + class does not guarantee to maintain bit stream across versions. We wrap any + RandomState instance other than the default with `PythonRandomInterface`. + All other numpy random inputs are wrapped with `PythonRandomViaNumpyBits`. 
+ """ if random_state is None or random_state is random: return random._inst if isinstance(random_state, random.Random): return random_state if isinstance(random_state, int): return random.Random(random_state) + + try: + import numpy as np + except ImportError: + pass + else: + if isinstance(random_state, PythonRandomInterface | PythonRandomViaNumpyBits): + return random_state + if isinstance(random_state, np.random.Generator): + return PythonRandomViaNumpyBits(random_state) + if random_state is np.random: + return PythonRandomViaNumpyBits(np.random.mtrand._rand) + + if isinstance(random_state, np.random.RandomState): + if random_state is np.random.mtrand._rand: + return PythonRandomViaNumpyBits(random_state) + # Only need older interface if specially constructed RandomState used + return PythonRandomInterface(random_state) + msg = f"{random_state} cannot be used to generate a random.Random instance" raise ValueError(msg) diff --git a/networkx/utils/tests/test_decorators.py b/networkx/utils/tests/test_decorators.py index 01a0a8b4aef7..0a4aeabfe0b0 100644 --- a/networkx/utils/tests/test_decorators.py +++ b/networkx/utils/tests/test_decorators.py @@ -13,7 +13,7 @@ open_file, py_random_state, ) -from networkx.utils.misc import PythonRandomInterface +from networkx.utils.misc import PythonRandomInterface, PythonRandomViaNumpyBits def test_not_implemented_decorator(): @@ -212,17 +212,19 @@ def setup_class(cls): @np_random_state(1) def instantiate_np_random_state(self, random_state): - assert isinstance(random_state, np.random.RandomState) - return random_state.random_sample() + allowed = (np.random.RandomState, np.random.Generator) + assert isinstance(random_state, allowed) + return random_state.random() @py_random_state(1) def instantiate_py_random_state(self, random_state): - assert isinstance(random_state, random.Random | PythonRandomInterface) + allowed = (random.Random, PythonRandomInterface, PythonRandomViaNumpyBits) + assert isinstance(random_state, allowed) 
return random_state.random() def test_random_state_None(self): np.random.seed(42) - rv = np.random.random_sample() + rv = np.random.random() np.random.seed(42) assert rv == self.instantiate_np_random_state(None) @@ -233,7 +235,7 @@ def test_random_state_None(self): def test_random_state_np_random(self): np.random.seed(42) - rv = np.random.random_sample() + rv = np.random.random() np.random.seed(42) assert rv == self.instantiate_np_random_state(np.random) np.random.seed(42) @@ -241,7 +243,7 @@ def test_random_state_np_random(self): def test_random_state_int(self): np.random.seed(42) - np_rv = np.random.random_sample() + np_rv = np.random.random() random.seed(42) py_rv = random.random() @@ -249,39 +251,56 @@ def test_random_state_int(self): seed = 1 rval = self.instantiate_np_random_state(seed) rval_expected = np.random.RandomState(seed).rand() - assert rval, rval_expected + assert rval == rval_expected # test that global seed wasn't changed in function - assert np_rv == np.random.random_sample() + assert np_rv == np.random.random() random.seed(42) rval = self.instantiate_py_random_state(seed) rval_expected = random.Random(seed).random() - assert rval, rval_expected + assert rval == rval_expected # test that global seed wasn't changed in function assert py_rv == random.random() - def test_random_state_np_random_RandomState(self): + def test_random_state_np_random_Generator(self): np.random.seed(42) - np_rv = np.random.random_sample() + np_rv = np.random.random() + np.random.seed(42) + seed = 1 + + rng = np.random.default_rng(seed) + rval = self.instantiate_np_random_state(rng) + rval_expected = np.random.default_rng(seed).random() + assert rval == rval_expected + + rval = self.instantiate_py_random_state(rng) + rval_expected = np.random.default_rng(seed).random(size=2)[1] + assert rval == rval_expected + # test that global seed wasn't changed in function + assert np_rv == np.random.random() + def test_random_state_np_random_RandomState(self): + np.random.seed(42) + 
np_rv = np.random.random() np.random.seed(42) seed = 1 + rng = np.random.RandomState(seed) - rval = self.instantiate_np_random_state(seed) - rval_expected = np.random.RandomState(seed).rand() - assert rval, rval_expected + rval = self.instantiate_np_random_state(rng) + rval_expected = np.random.RandomState(seed).random() + assert rval == rval_expected - rval = self.instantiate_py_random_state(seed) - rval_expected = np.random.RandomState(seed).rand() - assert rval, rval_expected + rval = self.instantiate_py_random_state(rng) + rval_expected = np.random.RandomState(seed).random(size=2)[1] + assert rval == rval_expected # test that global seed wasn't changed in function - assert np_rv == np.random.random_sample() + assert np_rv == np.random.random() def test_random_state_py_random(self): seed = 1 rng = random.Random(seed) rv = self.instantiate_py_random_state(rng) - assert rv, random.Random(seed).random() + assert rv == random.Random(seed).random() pytest.raises(ValueError, self.instantiate_np_random_state, rng) diff --git a/networkx/utils/tests/test_misc.py b/networkx/utils/tests/test_misc.py index 38e9a0e5aa3e..eff36b2a2f3c 100644 --- a/networkx/utils/tests/test_misc.py +++ b/networkx/utils/tests/test_misc.py @@ -6,6 +6,7 @@ import networkx as nx from networkx.utils import ( PythonRandomInterface, + PythonRandomViaNumpyBits, arbitrary_element, create_py_random_state, create_random_state, @@ -184,21 +185,31 @@ def test_create_py_random_state(): rs = np.random.RandomState rng = np.random.default_rng(1000) rng_explicit = np.random.Generator(np.random.SFC64()) - nprs = PythonRandomInterface + old_nprs = PythonRandomInterface + nprs = PythonRandomViaNumpyBits assert isinstance(create_py_random_state(np.random), nprs) - assert isinstance(create_py_random_state(rs(1)), nprs) + assert isinstance(create_py_random_state(rs(1)), old_nprs) assert isinstance(create_py_random_state(rng), nprs) assert isinstance(create_py_random_state(rng_explicit), nprs) # test default rng input 
- assert isinstance(PythonRandomInterface(), nprs) + assert isinstance(PythonRandomInterface(), old_nprs) + assert isinstance(PythonRandomViaNumpyBits(), nprs) + + # VeryLargeIntegers Smoke test (they raise error for np.random) + int64max = 9223372036854775807 # from np.iinfo(np.int64).max + for r in (rng, rs(1)): + prs = create_py_random_state(r) + prs.randrange(3, int64max + 5) + prs.randint(3, int64max + 5) def test_PythonRandomInterface_RandomState(): np = pytest.importorskip("numpy") + seed = 42 rs = np.random.RandomState - rng = PythonRandomInterface(rs(42)) - rs42 = rs(42) + rng = PythonRandomInterface(rs(seed)) + rs42 = rs(seed) # make sure these functions are same as expected outcome assert rng.randrange(3, 5) == rs42.randint(3, 5) @@ -219,8 +230,9 @@ def test_PythonRandomInterface_RandomState(): def test_PythonRandomInterface_Generator(): np = pytest.importorskip("numpy") - rng = np.random.default_rng(42) - pri = PythonRandomInterface(np.random.default_rng(42)) + seed = 42 + rng = np.random.default_rng(seed) + pri = PythonRandomInterface(np.random.default_rng(seed)) # make sure these functions are same as expected outcome assert pri.randrange(3, 5) == rng.integers(3, 5) From 5934f7ed080d7614fa727dc8bf84bf221e1c37d0 Mon Sep 17 00:00:00 2001 From: Dan Schult Date: Fri, 1 Mar 2024 00:52:16 -0500 Subject: [PATCH 12/47] DOC: add doc suggestions for arbitrarily large random integers tools (#7322) * follow up changes for arb large RNG * fix webpage --- networkx/utils/misc.py | 57 ++++++++++++++++++++++++------------------ 1 file changed, 33 insertions(+), 24 deletions(-) diff --git a/networkx/utils/misc.py b/networkx/utils/misc.py index 6cc8707bd108..b8de5e5aa28a 100644 --- a/networkx/utils/misc.py +++ b/networkx/utils/misc.py @@ -274,7 +274,7 @@ def create_random_state(random_state=None): class PythonRandomViaNumpyBits(random.Random): - """Provide the random.random algorithms using a Numpy.random bit generator + """Provide the random.random algorithms using a 
numpy.random bit generator The intent is to allow people to contribute code that uses Python's random library, but still allow users to provide a single easily controlled random @@ -287,7 +287,7 @@ class PythonRandomViaNumpyBits(random.Random): the methods `random`, `getrandbits`, `getstate`, `setstate` and `seed`. It makes them use the rng values from an input numpy `RandomState` or `Generator`. Those few methods allow the rest of the `random.Random` methods to provide - the API interface of `random.random` whlie using randomness generated by + the API interface of `random.random` while using randomness generated by a numpy generator. """ @@ -327,6 +327,7 @@ def setstate(self, state): def seed(self, *args, **kwds): "Do nothing override method." + raise NotImplementedError("seed() not implemented in PythonRandomViaNumpyBits") ################################################################## @@ -426,28 +427,36 @@ def create_py_random_state(random_state=None): Parameters ---------- random_state : int or random number generator or None (default=None) - If int, return a random.Random instance set with seed=int. - if random.Random instance, return it. - if None or the `random` package, return the global random number - generator used by `random`. - if an np.random.Generator instance, or the np.random package, or - the global numpy random number generator, then return it - wrapped in a PythonRandomViaNumpyBits class. - if a PythonRandomViaNumpyBits instance, return it - - # Provided for backward bit-stream matching with legacy code - if a np.randomRandomState instance and not the global numpy default, - return it wrapped in PythonRandomInterface - if a PythonRandomInterface instance, return it - - Note: Conversion from older PythonRandomInterface to PythonRandomViaNumpyBits - is handled here to allow users of Legacy `numpy.random.RandomState` to exactly - match the legacy values produced. 
We assume that if a user cares about legacy - values, they are using a np.RandomState instance that is not the numpy default. - The default instance has state reset for each Python session. The Generator - class does not guarantee to maintain bit stream across versions. We wrap any - RandomState instance other than the default with `PythonRandomInterface`. - All other numpy random inputs are wrapped with `PythonRandomViaNumpyBits`. + - If int, return a `random.Random` instance set with seed=int. + - If `random.Random` instance, return it. + - If None or the `np.random` package, return the global random number + generator used by `np.random`. + - If an `np.random.Generator` instance, or the `np.random` package, or + the global numpy random number generator, then return it. + wrapped in a `PythonRandomViaNumpyBits` class. + - If a `PythonRandomViaNumpyBits` instance, return it. + - If a `PythonRandomInterface` instance, return it. + - If a `np.random.RandomState` instance and not the global numpy default, + return it wrapped in `PythonRandomInterface` for backward bit-stream + matching with legacy code. + + Notes + ----- + - A diagram intending to illustrate the relationships behind our support + for numpy random numbers is called + `NetworkX Numpy Random Numbers `_. + - More discussion about this support also appears in + `gh-6869#comment `_. + - Wrappers of numpy.random number generators allow them to mimic the Python random + number generation algorithms. For example, Python can create arbitrarily large + random ints, and the wrappers use Numpy bit-streams with CPython's random module + to choose arbitrarily large random integers too. + - We provide two wrapper classes: + `PythonRandomViaNumpyBits` is usually what you want and is always used for + `np.Generator` instances. But for users who need to recreate random numbers + produced in NetworkX 3.2 or earlier, we maintain the `PythonRandomInterface` + wrapper as well. 
We use it only used if passed a (non-default) `np.RandomState` + instance pre-initialized from a seed. Otherwise the newer wrapper is used. """ if random_state is None or random_state is random: return random._inst From c339da9b0e3c54990624b2b2af7b97af7491eef8 Mon Sep 17 00:00:00 2001 From: Salim BELHADDAD <17280667+salym@users.noreply.github.com> Date: Fri, 1 Mar 2024 06:57:49 +0100 Subject: [PATCH 13/47] Fix empty GraphML attribute is not parsed (#7319) * Empty GraphML attribute is not parsed (main branche) Fixes #7291 networkx does not parse GraphML attributes that are empty. This fix will create the attribute with an empty string. * added test test_graphml.py * minor changes (lint) --- networkx/readwrite/graphml.py | 3 ++- networkx/readwrite/tests/test_graphml.py | 24 ++++++++++++++++++++++++ 2 files changed, 26 insertions(+), 1 deletion(-) diff --git a/networkx/readwrite/graphml.py b/networkx/readwrite/graphml.py index 3a9ab6a451ae..7d4fd7a66733 100644 --- a/networkx/readwrite/graphml.py +++ b/networkx/readwrite/graphml.py @@ -1010,9 +1010,10 @@ def decode_data_elements(self, graphml_keys, obj_xml): edge_label = data_element.find(f"{pref}EdgeLabel") if edge_label is not None: break - if edge_label is not None: data["label"] = edge_label.text + elif text is None: + data[data_name] = "" return data def find_graphml_keys(self, graph_element): diff --git a/networkx/readwrite/tests/test_graphml.py b/networkx/readwrite/tests/test_graphml.py index 2d87026ae8a0..5ffa837ea04f 100644 --- a/networkx/readwrite/tests/test_graphml.py +++ b/networkx/readwrite/tests/test_graphml.py @@ -1505,3 +1505,27 @@ def test_exception_for_unsupported_datatype_graph_attr(): fh = io.BytesIO() with pytest.raises(TypeError, match="GraphML does not support"): nx.write_graphml(G, fh) + + +def test_empty_attribute(): + """Tests that a GraphML string with an empty attribute can be parsed + correctly.""" + s = """ + + + + + + aaa + bbb + + + ccc + + + + """ + fh = io.BytesIO(s.encode("UTF-8")) + 
G = nx.read_graphml(fh) + assert G.nodes["0"] == {"foo": "aaa", "bar": "bbb"} + assert G.nodes["1"] == {"foo": "ccc", "bar": ""} From 28f3e9f61ee66e779b3475dfedc3e159344f4309 Mon Sep 17 00:00:00 2001 From: Erik Welch Date: Fri, 1 Mar 2024 13:54:26 -0600 Subject: [PATCH 14/47] Avoid creating results with numpy scalars (re: NEP 51) (#7282) * Avoid creating results with numpy scalars (re: NEP 51) * Revert changes to `convert_matrix.py` to see what happens * Unrevert reversion * Better * respond to feedback: be more clear and use `x.item()` * Use `a.item(i)` where appropriate --- .../algorithms/assortativity/correlation.py | 6 +- .../algorithms/bipartite/extendability.py | 3 +- networkx/algorithms/bipartite/spectral.py | 4 +- .../centrality/current_flow_betweenness.py | 38 ++++++------ .../current_flow_betweenness_subset.py | 28 ++++----- .../centrality/current_flow_closeness.py | 18 +++--- networkx/algorithms/centrality/eigenvector.py | 2 +- networkx/algorithms/centrality/katz.py | 4 +- networkx/algorithms/centrality/laplacian.py | 2 +- .../algorithms/centrality/second_order.py | 5 +- .../algorithms/centrality/subgraph_alg.py | 5 +- networkx/algorithms/centrality/trophic.py | 4 +- networkx/algorithms/cluster.py | 58 +++++++++---------- networkx/algorithms/distance_measures.py | 19 +++--- networkx/algorithms/non_randomness.py | 2 +- networkx/algorithms/shortest_paths/generic.py | 2 +- networkx/algorithms/similarity.py | 15 ++--- networkx/algorithms/smallworld.py | 4 +- networkx/algorithms/walks.py | 2 +- networkx/conftest.py | 7 --- networkx/convert_matrix.py | 23 ++++---- networkx/generators/classic.py | 2 +- networkx/generators/expanders.py | 3 +- networkx/linalg/algebraicconnectivity.py | 8 +-- networkx/linalg/laplacianmatrix.py | 2 +- networkx/utils/backends.py | 48 ++++++++++++++- 26 files changed, 181 insertions(+), 133 deletions(-) diff --git a/networkx/algorithms/assortativity/correlation.py b/networkx/algorithms/assortativity/correlation.py index 
c62e92f9c2ef..170d219a5d4b 100644 --- a/networkx/algorithms/assortativity/correlation.py +++ b/networkx/algorithms/assortativity/correlation.py @@ -156,7 +156,7 @@ def degree_pearson_correlation_coefficient(G, x="out", y="in", weight=None, node xy = node_degree_xy(G, x=x, y=y, nodes=nodes, weight=weight) x, y = zip(*xy) - return sp.stats.pearsonr(x, y)[0] + return float(sp.stats.pearsonr(x, y)[0]) @nx._dispatchable(node_attrs="attribute") @@ -280,7 +280,7 @@ def attribute_ac(M): s = (M @ M).sum() t = M.trace() r = (t - s) / (1 - s) - return r + return float(r) def _numeric_ac(M, mapping): @@ -299,4 +299,4 @@ def _numeric_ac(M, mapping): varb = (b[idx] * y**2).sum() - ((b[idx] * y).sum()) ** 2 xy = np.outer(x, y) ab = np.outer(a[idx], b[idx]) - return (xy * (M - ab)).sum() / np.sqrt(vara * varb) + return float((xy * (M - ab)).sum() / np.sqrt(vara * varb)) diff --git a/networkx/algorithms/bipartite/extendability.py b/networkx/algorithms/bipartite/extendability.py index 10dd5473b5cf..0764997ad008 100644 --- a/networkx/algorithms/bipartite/extendability.py +++ b/networkx/algorithms/bipartite/extendability.py @@ -10,6 +10,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") +@nx._dispatchable def maximal_extendability(G): """Computes the extendability of a graph. 
@@ -97,7 +98,7 @@ def maximal_extendability(G): # For node-pairs between V & U, keep min of max number of node-disjoint paths # Variable $k$ stands for the extendability of graph G - k = float("Inf") + k = float("inf") for u in U: for v in V: num_paths = sum(1 for _ in nx.node_disjoint_paths(residual_G, u, v)) diff --git a/networkx/algorithms/bipartite/spectral.py b/networkx/algorithms/bipartite/spectral.py index 0ecd283775fb..61a56dd2c0e3 100644 --- a/networkx/algorithms/bipartite/spectral.py +++ b/networkx/algorithms/bipartite/spectral.py @@ -57,12 +57,12 @@ def spectral_bipartivity(G, nodes=None, weight="weight"): coshA = 0.5 * (expA + expmA) if nodes is None: # return single number for entire graph - return coshA.diagonal().sum() / expA.diagonal().sum() + return float(coshA.diagonal().sum() / expA.diagonal().sum()) else: # contribution for individual nodes index = dict(zip(nodelist, range(len(nodelist)))) sb = {} for n in nodes: i = index[n] - sb[n] = coshA[i, i] / expA[i, i] + sb[n] = coshA.item(i, i) / expA.item(i, i) return sb diff --git a/networkx/algorithms/centrality/current_flow_betweenness.py b/networkx/algorithms/centrality/current_flow_betweenness.py index 9e68676b6a0f..b79a4c801e88 100644 --- a/networkx/algorithms/centrality/current_flow_betweenness.py +++ b/networkx/algorithms/centrality/current_flow_betweenness.py @@ -134,7 +134,7 @@ def approximate_current_flow_betweenness_centrality( continue for nbr in H[v]: w = H[v][nbr].get(weight, 1.0) - betweenness[v] += w * np.abs(p[v] - p[nbr]) * cstar2k + betweenness[v] += float(w * np.abs(p[v] - p[nbr]) * cstar2k) if normalized: factor = 1.0 else: @@ -220,24 +220,22 @@ def current_flow_betweenness_centrality( """ if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") - n = G.number_of_nodes() + N = G.number_of_nodes() ordering = list(reverse_cuthill_mckee_ordering(G)) # make a copy with integer labels according to rcm ordering # this could be done without a copy if we really wanted to 
- H = nx.relabel_nodes(G, dict(zip(ordering, range(n)))) - betweenness = dict.fromkeys(H, 0.0) # b[v]=0 for v in H + H = nx.relabel_nodes(G, dict(zip(ordering, range(N)))) + betweenness = dict.fromkeys(H, 0.0) # b[n]=0 for n in H for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver): - pos = dict(zip(row.argsort()[::-1], range(n))) - for i in range(n): - betweenness[s] += (i - pos[i]) * row[i] - betweenness[t] += (n - i - 1 - pos[i]) * row[i] + pos = dict(zip(row.argsort()[::-1], range(N))) + for i in range(N): + betweenness[s] += (i - pos[i]) * row.item(i) + betweenness[t] += (N - i - 1 - pos[i]) * row.item(i) if normalized: - nb = (n - 1.0) * (n - 2.0) # normalization factor + nb = (N - 1.0) * (N - 2.0) # normalization factor else: nb = 2.0 - for v in H: - betweenness[v] = float((betweenness[v] - v) * 2.0 / nb) - return {ordering[k]: v for k, v in betweenness.items()} + return {ordering[n]: (b - n) * 2.0 / nb for n, b in betweenness.items()} @not_implemented_for("directed") @@ -323,21 +321,21 @@ def edge_current_flow_betweenness_centrality( """ if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") - n = G.number_of_nodes() + N = G.number_of_nodes() ordering = list(reverse_cuthill_mckee_ordering(G)) # make a copy with integer labels according to rcm ordering # this could be done without a copy if we really wanted to - H = nx.relabel_nodes(G, dict(zip(ordering, range(n)))) + H = nx.relabel_nodes(G, dict(zip(ordering, range(N)))) edges = (tuple(sorted((u, v))) for u, v in H.edges()) betweenness = dict.fromkeys(edges, 0.0) if normalized: - nb = (n - 1.0) * (n - 2.0) # normalization factor + nb = (N - 1.0) * (N - 2.0) # normalization factor else: nb = 2.0 for row, (e) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver): - pos = dict(zip(row.argsort()[::-1], range(1, n + 1))) - for i in range(n): - betweenness[e] += (i + 1 - pos[i]) * row[i] - betweenness[e] += (n - i - pos[i]) * row[i] + pos = 
dict(zip(row.argsort()[::-1], range(1, N + 1))) + for i in range(N): + betweenness[e] += (i + 1 - pos[i]) * row.item(i) + betweenness[e] += (N - i - pos[i]) * row.item(i) betweenness[e] /= nb - return {(ordering[s], ordering[t]): v for (s, t), v in betweenness.items()} + return {(ordering[s], ordering[t]): b for (s, t), b in betweenness.items()} diff --git a/networkx/algorithms/centrality/current_flow_betweenness_subset.py b/networkx/algorithms/centrality/current_flow_betweenness_subset.py index 38e744a835ac..c6790b218e9d 100644 --- a/networkx/algorithms/centrality/current_flow_betweenness_subset.py +++ b/networkx/algorithms/centrality/current_flow_betweenness_subset.py @@ -96,27 +96,27 @@ def current_flow_betweenness_centrality_subset( if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") - n = G.number_of_nodes() + N = G.number_of_nodes() ordering = list(reverse_cuthill_mckee_ordering(G)) # make a copy with integer labels according to rcm ordering # this could be done without a copy if we really wanted to - mapping = dict(zip(ordering, range(n))) + mapping = dict(zip(ordering, range(N))) H = nx.relabel_nodes(G, mapping) - betweenness = dict.fromkeys(H, 0.0) # b[v]=0 for v in H + betweenness = dict.fromkeys(H, 0.0) # b[n]=0 for n in H for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver): for ss in sources: i = mapping[ss] for tt in targets: j = mapping[tt] - betweenness[s] += 0.5 * np.abs(row[i] - row[j]) - betweenness[t] += 0.5 * np.abs(row[i] - row[j]) + betweenness[s] += 0.5 * abs(row.item(i) - row.item(j)) + betweenness[t] += 0.5 * abs(row.item(i) - row.item(j)) if normalized: - nb = (n - 1.0) * (n - 2.0) # normalization factor + nb = (N - 1.0) * (N - 2.0) # normalization factor else: nb = 2.0 - for v in H: - betweenness[v] = betweenness[v] / nb + 1.0 / (2 - n) - return {ordering[k]: v for k, v in betweenness.items()} + for node in H: + betweenness[node] = betweenness[node] / nb + 1.0 / (2 - N) + return 
{ordering[node]: value for node, value in betweenness.items()} @not_implemented_for("directed") @@ -204,16 +204,16 @@ def edge_current_flow_betweenness_centrality_subset( if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") - n = G.number_of_nodes() + N = G.number_of_nodes() ordering = list(reverse_cuthill_mckee_ordering(G)) # make a copy with integer labels according to rcm ordering # this could be done without a copy if we really wanted to - mapping = dict(zip(ordering, range(n))) + mapping = dict(zip(ordering, range(N))) H = nx.relabel_nodes(G, mapping) edges = (tuple(sorted((u, v))) for u, v in H.edges()) betweenness = dict.fromkeys(edges, 0.0) if normalized: - nb = (n - 1.0) * (n - 2.0) # normalization factor + nb = (N - 1.0) * (N - 2.0) # normalization factor else: nb = 2.0 for row, (e) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver): @@ -221,6 +221,6 @@ def edge_current_flow_betweenness_centrality_subset( i = mapping[ss] for tt in targets: j = mapping[tt] - betweenness[e] += 0.5 * np.abs(row[i] - row[j]) + betweenness[e] += 0.5 * abs(row.item(i) - row.item(j)) betweenness[e] /= nb - return {(ordering[s], ordering[t]): v for (s, t), v in betweenness.items()} + return {(ordering[s], ordering[t]): value for (s, t), value in betweenness.items()} diff --git a/networkx/algorithms/centrality/current_flow_closeness.py b/networkx/algorithms/centrality/current_flow_closeness.py index c4c8dd56c3e0..92c892f74494 100644 --- a/networkx/algorithms/centrality/current_flow_closeness.py +++ b/networkx/algorithms/centrality/current_flow_closeness.py @@ -74,24 +74,22 @@ def current_flow_closeness_centrality(G, weight=None, dtype=float, solver="lu"): "lu": SuperLUInverseLaplacian, "cg": CGInverseLaplacian, } - n = G.number_of_nodes() + N = G.number_of_nodes() ordering = list(reverse_cuthill_mckee_ordering(G)) # make a copy with integer labels according to rcm ordering # this could be done without a copy if we really wanted to - H = 
nx.relabel_nodes(G, dict(zip(ordering, range(n)))) - betweenness = dict.fromkeys(H, 0.0) # b[v]=0 for v in H - n = H.number_of_nodes() - L = nx.laplacian_matrix(H, nodelist=range(n), weight=weight).asformat("csc") + H = nx.relabel_nodes(G, dict(zip(ordering, range(N)))) + betweenness = dict.fromkeys(H, 0.0) # b[n]=0 for n in H + N = H.number_of_nodes() + L = nx.laplacian_matrix(H, nodelist=range(N), weight=weight).asformat("csc") L = L.astype(dtype) C2 = solvername[solver](L, width=1, dtype=dtype) # initialize solver for v in H: col = C2.get_row(v) for w in H: - betweenness[v] += col[v] - 2 * col[w] - betweenness[w] += col[v] - for v in H: - betweenness[v] = 1 / (betweenness[v]) - return {ordering[k]: v for k, v in betweenness.items()} + betweenness[v] += col.item(v) - 2 * col.item(w) + betweenness[w] += col.item(v) + return {ordering[node]: 1 / value for node, value in betweenness.items()} information_centrality = current_flow_closeness_centrality diff --git a/networkx/algorithms/centrality/eigenvector.py b/networkx/algorithms/centrality/eigenvector.py index f7fcbf780602..ed57b2aeb321 100644 --- a/networkx/algorithms/centrality/eigenvector.py +++ b/networkx/algorithms/centrality/eigenvector.py @@ -338,4 +338,4 @@ def eigenvector_centrality_numpy(G, weight=None, max_iter=50, tol=0): ) largest = eigenvector.flatten().real norm = np.sign(largest.sum()) * sp.linalg.norm(largest) - return dict(zip(G, largest / norm)) + return dict(zip(G, (largest / norm).tolist())) diff --git a/networkx/algorithms/centrality/katz.py b/networkx/algorithms/centrality/katz.py index 527ec622fc4d..d85ffd2dcd29 100644 --- a/networkx/algorithms/centrality/katz.py +++ b/networkx/algorithms/centrality/katz.py @@ -325,6 +325,6 @@ def katz_centrality_numpy(G, alpha=0.1, beta=1.0, normalized=True, weight=None): n = A.shape[0] centrality = np.linalg.solve(np.eye(n, n) - (alpha * A), b).squeeze() - # Normalize: rely on truediv to cast to float + # Normalize: rely on truediv to cast to float, then 
tolist to make Python numbers norm = np.sign(sum(centrality)) * np.linalg.norm(centrality) if normalized else 1 - return dict(zip(nodelist, centrality / norm)) + return dict(zip(nodelist, (centrality / norm).tolist())) diff --git a/networkx/algorithms/centrality/laplacian.py b/networkx/algorithms/centrality/laplacian.py index 92ad463884bf..66207ed2189c 100644 --- a/networkx/algorithms/centrality/laplacian.py +++ b/networkx/algorithms/centrality/laplacian.py @@ -144,6 +144,6 @@ def laplacian_centrality( if normalized: lapl_cent = lapl_cent / full_energy - laplace_centralities_dict[node] = lapl_cent + laplace_centralities_dict[node] = float(lapl_cent) return laplace_centralities_dict diff --git a/networkx/algorithms/centrality/second_order.py b/networkx/algorithms/centrality/second_order.py index b08fe66b71f8..35583cd63e55 100644 --- a/networkx/algorithms/centrality/second_order.py +++ b/networkx/algorithms/centrality/second_order.py @@ -134,5 +134,8 @@ def _Qj(P, j): ) # eq 3 return dict( - zip(G.nodes, [np.sqrt(2 * np.sum(M[:, i]) - n * (n + 1)) for i in range(n)]) + zip( + G.nodes, + (float(np.sqrt(2 * np.sum(M[:, i]) - n * (n + 1))) for i in range(n)), + ) ) # eq 6 diff --git a/networkx/algorithms/centrality/subgraph_alg.py b/networkx/algorithms/centrality/subgraph_alg.py index 3234e854b3c1..29a284c547c5 100644 --- a/networkx/algorithms/centrality/subgraph_alg.py +++ b/networkx/algorithms/centrality/subgraph_alg.py @@ -278,7 +278,7 @@ def communicability_betweenness_centrality(G): B[i, :] = 0 B[:, i] = 0 B -= np.diag(np.diag(B)) - cbc[v] = B.sum() + cbc[v] = float(B.sum()) # put row and col back A[i, :] = row A[:, i] = col @@ -286,8 +286,7 @@ def communicability_betweenness_centrality(G): order = len(cbc) if order > 2: scale = 1.0 / ((order - 1.0) ** 2 - (order - 1.0)) - for v in cbc: - cbc[v] *= scale + cbc = {node: value * scale for node, value in cbc.items()} return cbc diff --git a/networkx/algorithms/centrality/trophic.py 
b/networkx/algorithms/centrality/trophic.py index d6dcca1526f2..6d1ba960ba9a 100644 --- a/networkx/algorithms/centrality/trophic.py +++ b/networkx/algorithms/centrality/trophic.py @@ -76,7 +76,7 @@ def trophic_levels(G, weight="weight"): # all other nodes have levels as calculated nonzero_node_ids = (node_id for node_id, degree in G.in_degree if degree != 0) for i, node_id in enumerate(nonzero_node_ids): - levels[node_id] = y[i] + levels[node_id] = y.item(i) return levels @@ -159,4 +159,4 @@ def trophic_incoherence_parameter(G, weight="weight", cannibalism=False): # Avoid copy otherwise G_2 = G diffs = trophic_differences(G_2, weight=weight) - return np.std(list(diffs.values())) + return float(np.std(list(diffs.values()))) diff --git a/networkx/algorithms/cluster.py b/networkx/algorithms/cluster.py index 36e33f0226ce..6c91ad281350 100644 --- a/networkx/algorithms/cluster.py +++ b/networkx/algorithms/cluster.py @@ -143,10 +143,10 @@ def wt(u, v): # Only compute the edge weight once, before the inner inner # loop. 
wij = wt(i, j) - weighted_triangles += sum( - np.cbrt([(wij * wt(j, k) * wt(k, i)) for k in inbrs & jnbrs]) - ) - yield (i, len(inbrs), 2 * weighted_triangles) + weighted_triangles += np.cbrt( + [(wij * wt(j, k) * wt(k, i)) for k in inbrs & jnbrs] + ).sum() + yield (i, len(inbrs), 2 * float(weighted_triangles)) @not_implemented_for("multigraph") @@ -213,38 +213,38 @@ def wt(u, v): for j in ipreds: jpreds = set(G._pred[j]) - {j} jsuccs = set(G._succ[j]) - {j} - directed_triangles += sum( - np.cbrt([(wt(j, i) * wt(k, i) * wt(k, j)) for k in ipreds & jpreds]) - ) - directed_triangles += sum( - np.cbrt([(wt(j, i) * wt(k, i) * wt(j, k)) for k in ipreds & jsuccs]) - ) - directed_triangles += sum( - np.cbrt([(wt(j, i) * wt(i, k) * wt(k, j)) for k in isuccs & jpreds]) - ) - directed_triangles += sum( - np.cbrt([(wt(j, i) * wt(i, k) * wt(j, k)) for k in isuccs & jsuccs]) - ) + directed_triangles += np.cbrt( + [(wt(j, i) * wt(k, i) * wt(k, j)) for k in ipreds & jpreds] + ).sum() + directed_triangles += np.cbrt( + [(wt(j, i) * wt(k, i) * wt(j, k)) for k in ipreds & jsuccs] + ).sum() + directed_triangles += np.cbrt( + [(wt(j, i) * wt(i, k) * wt(k, j)) for k in isuccs & jpreds] + ).sum() + directed_triangles += np.cbrt( + [(wt(j, i) * wt(i, k) * wt(j, k)) for k in isuccs & jsuccs] + ).sum() for j in isuccs: jpreds = set(G._pred[j]) - {j} jsuccs = set(G._succ[j]) - {j} - directed_triangles += sum( - np.cbrt([(wt(i, j) * wt(k, i) * wt(k, j)) for k in ipreds & jpreds]) - ) - directed_triangles += sum( - np.cbrt([(wt(i, j) * wt(k, i) * wt(j, k)) for k in ipreds & jsuccs]) - ) - directed_triangles += sum( - np.cbrt([(wt(i, j) * wt(i, k) * wt(k, j)) for k in isuccs & jpreds]) - ) - directed_triangles += sum( - np.cbrt([(wt(i, j) * wt(i, k) * wt(j, k)) for k in isuccs & jsuccs]) - ) + directed_triangles += np.cbrt( + [(wt(i, j) * wt(k, i) * wt(k, j)) for k in ipreds & jpreds] + ).sum() + directed_triangles += np.cbrt( + [(wt(i, j) * wt(k, i) * wt(j, k)) for k in ipreds & jsuccs] + 
).sum() + directed_triangles += np.cbrt( + [(wt(i, j) * wt(i, k) * wt(k, j)) for k in isuccs & jpreds] + ).sum() + directed_triangles += np.cbrt( + [(wt(i, j) * wt(i, k) * wt(j, k)) for k in isuccs & jsuccs] + ).sum() dtotal = len(ipreds) + len(isuccs) dbidirectional = len(ipreds & isuccs) - yield (i, dtotal, dbidirectional, directed_triangles) + yield (i, dtotal, dbidirectional, float(directed_triangles)) @nx._dispatchable(edge_attrs="weight") diff --git a/networkx/algorithms/distance_measures.py b/networkx/algorithms/distance_measures.py index 310c3913f01a..8215b470af3b 100644 --- a/networkx/algorithms/distance_measures.py +++ b/networkx/algorithms/distance_measures.py @@ -735,14 +735,14 @@ def resistance_distance(G, nodeA=None, nodeB=None, weight=None, invert_weight=Tr if nodeA is not None and nodeB is not None: i = node_list.index(nodeA) j = node_list.index(nodeB) - return Linv[i, i] + Linv[j, j] - Linv[i, j] - Linv[j, i] + return Linv.item(i, i) + Linv.item(j, j) - Linv.item(i, j) - Linv.item(j, i) elif nodeA is not None: i = node_list.index(nodeA) d = {} for n in G: j = node_list.index(n) - d[n] = Linv[i, i] + Linv[j, j] - Linv[i, j] - Linv[j, i] + d[n] = Linv.item(i, i) + Linv.item(j, j) - Linv.item(i, j) - Linv.item(j, i) return d elif nodeB is not None: @@ -750,7 +750,7 @@ def resistance_distance(G, nodeA=None, nodeB=None, weight=None, invert_weight=Tr d = {} for n in G: i = node_list.index(n) - d[n] = Linv[i, i] + Linv[j, j] - Linv[i, j] - Linv[j, i] + d[n] = Linv.item(i, i) + Linv.item(j, j) - Linv.item(i, j) - Linv.item(j, i) return d else: @@ -760,7 +760,12 @@ def resistance_distance(G, nodeA=None, nodeB=None, weight=None, invert_weight=Tr d[n] = {} for n2 in G: j = node_list.index(n2) - d[n][n2] = Linv[i, i] + Linv[j, j] - Linv[i, j] - Linv[j, i] + d[n][n2] = ( + Linv.item(i, i) + + Linv.item(j, j) + - Linv.item(i, j) + - Linv.item(j, i) + ) return d @@ -832,7 +837,7 @@ def effective_graph_resistance(G, weight=None, invert_weight=True): # Disconnected 
graphs have infinite Effective graph resistance if not nx.is_connected(G): - return np.inf + return float("inf") # Invert weights G = G.copy() @@ -849,7 +854,7 @@ def effective_graph_resistance(G, weight=None, invert_weight=True): # Compute Effective graph resistance based on spectrum of the Laplacian # Self-loops are ignored - return np.sum(1 / mu[1:]) * G.number_of_nodes() + return float(np.sum(1 / mu[1:]) * G.number_of_nodes()) @nx.utils.not_implemented_for("directed") @@ -941,4 +946,4 @@ def kemeny_constant(G, *, weight=None): eig = np.sort(sp.linalg.eigvalsh(H.todense())) # Compute the Kemeny constant - return np.sum(1 / (1 - eig[:-1])) + return float(np.sum(1 / (1 - eig[:-1]))) diff --git a/networkx/algorithms/non_randomness.py b/networkx/algorithms/non_randomness.py index d0e9acbf6664..85483d330fac 100644 --- a/networkx/algorithms/non_randomness.py +++ b/networkx/algorithms/non_randomness.py @@ -84,7 +84,7 @@ def non_randomness(G, k=None, weight="weight"): # eq. 4.4 eigenvalues = np.linalg.eigvals(nx.to_numpy_array(G, weight=weight)) - nr = np.real(np.sum(eigenvalues[:k])) + nr = float(np.real(np.sum(eigenvalues[:k]))) n = G.number_of_nodes() m = G.number_of_edges() diff --git a/networkx/algorithms/shortest_paths/generic.py b/networkx/algorithms/shortest_paths/generic.py index aa5fc5c89c13..81b72419acec 100644 --- a/networkx/algorithms/shortest_paths/generic.py +++ b/networkx/algorithms/shortest_paths/generic.py @@ -444,7 +444,7 @@ def path_length(v): all_pairs = nx.floyd_warshall(G, weight=weight) s = sum(sum(t.values()) for t in all_pairs.values()) elif method == "floyd-warshall-numpy": - s = nx.floyd_warshall_numpy(G, weight=weight).sum() + s = float(nx.floyd_warshall_numpy(G, weight=weight).sum()) return s / (n * (n - 1)) diff --git a/networkx/algorithms/similarity.py b/networkx/algorithms/similarity.py index 68625d009ead..765849984eae 100644 --- a/networkx/algorithms/similarity.py +++ b/networkx/algorithms/similarity.py @@ -1209,7 +1209,7 @@ def 
prune(cost): # assert sorted(G2.edges) == sorted(h for g, h in edge_path if h is not None) # print(vertex_path, edge_path, cost, file = sys.stderr) # assert cost == maxcost_value - yield list(vertex_path), list(edge_path), cost + yield list(vertex_path), list(edge_path), float(cost) @nx._dispatchable @@ -1350,10 +1350,10 @@ def simrank(G, u, v): if isinstance(x, np.ndarray): if x.ndim == 1: - return dict(zip(G, x)) + return dict(zip(G, x.tolist())) # else x.ndim == 2 - return {u: dict(zip(G, row)) for u, row in zip(G, x)} - return x + return {u: dict(zip(G, row)) for u, row in zip(G, x.tolist())} + return float(x) def _simrank_similarity_python( @@ -1653,8 +1653,9 @@ def panther_similarity( top_k_sorted = top_k_unsorted[np.argsort(S[top_k_unsorted])][::-1] # Add back the similarity scores - top_k_sorted_names = (node_map[n] for n in top_k_sorted) - top_k_with_val = dict(zip(top_k_sorted_names, S[top_k_sorted])) + top_k_with_val = dict( + zip(node_map[top_k_sorted].tolist(), S[top_k_sorted].tolist()) + ) # Remove the self-similarity top_k_with_val.pop(source, None) @@ -1723,7 +1724,7 @@ def generate_random_paths( inv_row_sums = np.reciprocal(adj_mat.sum(axis=1)).reshape(-1, 1) transition_probabilities = adj_mat * inv_row_sums - node_map = np.array(G) + node_map = list(G) num_nodes = G.number_of_nodes() for path_index in range(sample_size): diff --git a/networkx/algorithms/smallworld.py b/networkx/algorithms/smallworld.py index 8ddf74887f24..05ae1708202a 100644 --- a/networkx/algorithms/smallworld.py +++ b/networkx/algorithms/smallworld.py @@ -308,7 +308,7 @@ def sigma(G, niter=100, nrand=10, seed=None): sigma = (C / Cr) / (L / Lr) - return sigma + return float(sigma) @not_implemented_for("directed") @@ -400,4 +400,4 @@ def omega(G, niter=5, nrand=10, seed=None): omega = (Lr / L) - (C / Cl) - return omega + return float(omega) diff --git a/networkx/algorithms/walks.py b/networkx/algorithms/walks.py index 91214c8e7781..fe341757750d 100644 --- 
a/networkx/algorithms/walks.py +++ b/networkx/algorithms/walks.py @@ -74,7 +74,7 @@ def number_of_walks(G, walk_length): # power = sp.sparse.linalg.matrix_power(A, walk_length) power = np.linalg.matrix_power(A.toarray(), walk_length) result = { - u: {v: power[u_idx, v_idx] for v_idx, v in enumerate(G)} + u: {v: power.item(u_idx, v_idx) for v_idx, v in enumerate(G)} for u_idx, u in enumerate(G) } return result diff --git a/networkx/conftest.py b/networkx/conftest.py index 20894e2aded7..a6b63ef350e5 100644 --- a/networkx/conftest.py +++ b/networkx/conftest.py @@ -156,13 +156,6 @@ def set_warnings(): @pytest.fixture(autouse=True) def add_nx(doctest_namespace): doctest_namespace["nx"] = networkx - # TODO: remove the try-except block when we require numpy >= 2 - try: - import numpy as np - - np.set_printoptions(legacy="1.21") - except ImportError: - pass # What dependencies are installed? diff --git a/networkx/convert_matrix.py b/networkx/convert_matrix.py index f669a09ca5d2..2fc1e784f5b4 100644 --- a/networkx/convert_matrix.py +++ b/networkx/convert_matrix.py @@ -623,10 +623,11 @@ def _csr_gen_triples(A): """ nrows = A.shape[0] - data, indices, indptr = A.data, A.indices, A.indptr - for i in range(nrows): - for j in range(indptr[i], indptr[i + 1]): - yield i, int(indices[j]), data[j] + indptr, dst_indices, data = A.indptr, A.indices, A.data + import numpy as np + + src_indices = np.repeat(np.arange(nrows), np.diff(indptr)) + return zip(src_indices.tolist(), dst_indices.tolist(), A.data.tolist()) def _csc_gen_triples(A): @@ -635,10 +636,11 @@ def _csc_gen_triples(A): """ ncols = A.shape[1] - data, indices, indptr = A.data, A.indices, A.indptr - for i in range(ncols): - for j in range(indptr[i], indptr[i + 1]): - yield int(indices[j]), i, data[j] + indptr, src_indices, data = A.indptr, A.indices, A.data + import numpy as np + + dst_indices = np.repeat(np.arange(ncols), np.diff(indptr)) + return zip(src_indices.tolist(), dst_indices.tolist(), A.data.tolist()) def 
_coo_gen_triples(A): @@ -646,7 +648,7 @@ def _coo_gen_triples(A): of weighted edge triples. """ - return ((int(i), int(j), d) for i, j, d in zip(A.row, A.col, A.data)) + return zip(A.row.tolist(), A.col.tolist(), A.data.tolist()) def _dok_gen_triples(A): @@ -655,7 +657,8 @@ def _dok_gen_triples(A): """ for (r, c), v in A.items(): - yield r, c, v + # Use `v.item()` to convert a NumPy scalar to the appropriate Python scalar + yield int(r), int(c), v.item() def _generate_weighted_edges(A): diff --git a/networkx/generators/classic.py b/networkx/generators/classic.py index 54088be7519f..4278a338d919 100644 --- a/networkx/generators/classic.py +++ b/networkx/generators/classic.py @@ -803,7 +803,7 @@ def star_graph(n, create_using=None): """ n, nodes = n if isinstance(n, numbers.Integral): - nodes.append(n) # there should be n+1 nodes + nodes.append(int(n)) # there should be n+1 nodes G = empty_graph(nodes, create_using) if G.is_directed(): raise NetworkXError("Directed Graph not supported") diff --git a/networkx/generators/expanders.py b/networkx/generators/expanders.py index 85716e0e7a05..d1bddea3c95d 100644 --- a/networkx/generators/expanders.py +++ b/networkx/generators/expanders.py @@ -308,7 +308,8 @@ def maybe_regular_expander(n, d, *, create_using=None, max_tries=100, seed=None) iterations -= 1 # Faster than random.permutation(n) since there are only # (n-1)! distinct cycles against n! permutations of size n - cycle = np.concatenate((seed.permutation(n - 1), [n - 1])) + cycle = seed.permutation(n - 1).tolist() + cycle.append(n - 1) new_edges = { (u, v) diff --git a/networkx/linalg/algebraicconnectivity.py b/networkx/linalg/algebraicconnectivity.py index 70f23bbac970..870b4ec5f59d 100644 --- a/networkx/linalg/algebraicconnectivity.py +++ b/networkx/linalg/algebraicconnectivity.py @@ -222,7 +222,7 @@ def project(X): # element needs to modified. Changing to infinity forces a zero in the # corresponding element in the solution. 
i = (A.indptr[1:] - A.indptr[:-1]).argmax() - A[i, i] = float("inf") + A[i, i] = np.inf solver = _LUSolver(A) else: raise nx.NetworkXError(f"Unknown linear system solver: {method}") @@ -398,12 +398,12 @@ def algebraic_connectivity( L = nx.laplacian_matrix(G) if L.shape[0] == 2: - return 2.0 * L[0, 0] if not normalized else 2.0 + return 2.0 * float(L[0, 0]) if not normalized else 2.0 find_fiedler = _get_fiedler_func(method) x = None if method != "lobpcg" else _rcm_estimate(G, G) sigma, fiedler = find_fiedler(L, x, normalized, tol, seed) - return sigma + return float(sigma) @not_implemented_for("directed") @@ -653,4 +653,4 @@ def spectral_bisection( nodes = np.array(list(G)) pos_vals = v >= 0 - return set(nodes[~pos_vals]), set(nodes[pos_vals]) + return set(nodes[~pos_vals].tolist()), set(nodes[pos_vals].tolist()) diff --git a/networkx/linalg/laplacianmatrix.py b/networkx/linalg/laplacianmatrix.py index 13aebff49980..bf2775a4f014 100644 --- a/networkx/linalg/laplacianmatrix.py +++ b/networkx/linalg/laplacianmatrix.py @@ -249,7 +249,7 @@ def total_spanning_tree_weight(G, weight=None): G_laplacian = nx.laplacian_matrix(G, weight=weight).toarray() # Determinant ignoring first row and column - return abs(np.linalg.det(G_laplacian[1:, 1:])) + return float(abs(np.linalg.det(G_laplacian[1:, 1:]))) ############################################################################### diff --git a/networkx/utils/backends.py b/networkx/utils/backends.py index 8a0e127caa87..1533dc554358 100644 --- a/networkx/utils/backends.py +++ b/networkx/utils/backends.py @@ -864,13 +864,15 @@ def _convert_and_call_for_tests( msg += " with the given arguments" pytest.xfail(msg) - from collections.abc import Iterator + from collections.abc import Iterable, Iterator, Mapping from copy import copy from io import BufferedReader, BytesIO from itertools import tee from random import Random + import numpy as np from numpy.random import Generator, RandomState + from scipy.sparse import sparray # We 
sometimes compare the backend result to the original result, # so we need two sets of arguments. We tee iterators and copy @@ -951,6 +953,50 @@ def _convert_and_call_for_tests( ): raise RuntimeError(f"`returns_graph` is incorrect for {self.name}") + def check_result(val, depth=0): + if isinstance(val, np.number): + raise RuntimeError( + f"{self.name} returned a numpy scalar {val} ({type(val)}, depth={depth})" + ) + if isinstance(val, np.ndarray | sparray): + return + if isinstance(val, nx.Graph): + check_result(val._node, depth=depth + 1) + check_result(val._adj, depth=depth + 1) + return + if isinstance(val, Iterator): + raise NotImplementedError + if isinstance(val, Iterable) and not isinstance(val, str): + for x in val: + check_result(x, depth=depth + 1) + if isinstance(val, Mapping): + for x in val.values(): + check_result(x, depth=depth + 1) + + def check_iterator(it): + for val in it: + try: + check_result(val) + except RuntimeError as exc: + raise RuntimeError( + f"{self.name} returned a numpy scalar {val} ({type(val)})" + ) from exc + yield val + + if self.name in {"from_edgelist"}: + # numpy scalars are explicitly given as values in some tests + pass + elif isinstance(result, Iterator): + result = check_iterator(result) + else: + try: + check_result(result) + except RuntimeError as exc: + raise RuntimeError( + f"{self.name} returned a numpy scalar {result} ({type(result)})" + ) from exc + check_result(result) + if self.name in { "edmonds_karp_core", "barycenter", From 046eed45eeefec5c71d07f38fdeec4bd70a5b973 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Mar 2024 18:47:39 -0800 Subject: [PATCH 15/47] Bump changelist from 0.4 to 0.5 (#7325) Bumps [changelist](https://github.com/scientific-python/changelist) from 0.4 to 0.5. 
- [Release notes](https://github.com/scientific-python/changelist/releases) - [Changelog](https://github.com/scientific-python/changelist/blob/main/CHANGELOG.md) - [Commits](https://github.com/scientific-python/changelist/compare/v0.4...v0.5) --- updated-dependencies: - dependency-name: changelist dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- requirements/developer.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 47be7f9a6d3b..ba1d1db73455 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,7 +61,7 @@ default = [ 'pandas>=1.4', ] developer = [ - 'changelist==0.4', + 'changelist==0.5', 'pre-commit>=3.2', 'mypy>=1.1', 'rtoml', diff --git a/requirements/developer.txt b/requirements/developer.txt index bc1312bf30e4..b74a1f33b5c7 100644 --- a/requirements/developer.txt +++ b/requirements/developer.txt @@ -1,6 +1,6 @@ # Generated via tools/generate_requirements.py and pre-commit hook. # Do not edit this file; modify pyproject.toml instead. -changelist==0.4 +changelist==0.5 pre-commit>=3.2 mypy>=1.1 rtoml From 00e3c4c4ecad08bff17b894f6d39bcc96a3c583f Mon Sep 17 00:00:00 2001 From: peijenburg Date: Sun, 3 Mar 2024 05:45:14 +0100 Subject: [PATCH 16/47] Add number_of_spanning_trees (#7100) Adds number_of_spanning_trees and deprecates total_spanning_tree_weight in favor of the new function. 
--------- Co-authored-by: Dan Schult Co-authored-by: Ross Barnowski --- doc/developer/deprecations.rst | 1 + doc/reference/algorithms/tree.rst | 1 + networkx/algorithms/tree/mst.py | 135 ++++++++++++++++++ networkx/algorithms/tree/tests/test_mst.py | 123 ++++++++++++++++ networkx/conftest.py | 4 + networkx/linalg/laplacianmatrix.py | 155 ++++++++++++++++----- 6 files changed, 388 insertions(+), 31 deletions(-) diff --git a/doc/developer/deprecations.rst b/doc/developer/deprecations.rst index 099d47cd4198..a54295865ca1 100644 --- a/doc/developer/deprecations.rst +++ b/doc/developer/deprecations.rst @@ -70,5 +70,6 @@ Version 3.5 to return a dict. See #6527 * Change ``shortest_path`` in ``algorithms/shortest_path/generic.py`` to return a iterator. See #6527 +* Remove ``total_spanning_tree_weight`` from ``linalg/laplacianmatrix.py`` * Remove ``create`` keyword argument from ``nonisomorphic_trees`` in ``generators/nonisomorphic_trees``. diff --git a/doc/reference/algorithms/tree.rst b/doc/reference/algorithms/tree.rst index 4c7c3e6f585b..363d1e9665b0 100644 --- a/doc/reference/algorithms/tree.rst +++ b/doc/reference/algorithms/tree.rst @@ -64,6 +64,7 @@ Spanning Trees minimum_spanning_edges maximum_spanning_edges SpanningTreeIterator + number_of_spanning_trees Decomposition ------------- diff --git a/networkx/algorithms/tree/mst.py b/networkx/algorithms/tree/mst.py index 850536edf6cd..72c1980cb15c 100644 --- a/networkx/algorithms/tree/mst.py +++ b/networkx/algorithms/tree/mst.py @@ -18,6 +18,7 @@ "maximum_spanning_edges", "minimum_spanning_tree", "maximum_spanning_tree", + "number_of_spanning_trees", "random_spanning_tree", "partition_spanning_tree", "EdgePartition", @@ -1136,3 +1137,137 @@ def _clear_partition(self, G): for u, v, d in G.edges(data=True): if self.partition_key in d: del d[self.partition_key] + + +@nx._dispatchable(edge_attrs="weight") +def number_of_spanning_trees(G, *, root=None, weight=None): + """Returns the number of spanning trees in `G`. 
+ + A spanning tree for an undirected graph is a tree that connects + all nodes in the graph. For a directed graph, the analog of a + spanning tree is called a (spanning) arborescence. The arborescence + includes a unique directed path from the `root` node to each other node. + The graph must be weakly connected, and the root must be a node + that includes all nodes as successors [3]_. Note that to avoid + discussing sink-roots and reverse-arborescences, we have reversed + the edge orientation from [3]_ and use the in-degree laplacian. + + This function (when `weight` is `None`) returns the number of + spanning trees for an undirected graph and the number of + arborescences from a single root node for a directed graph. + When `weight` is the name of an edge attribute which holds the + weight value of each edge, the function returns the sum over + all trees of the multiplicative weight of each tree. That is, + the weight of the tree is the product of its edge weights. + + Kirchhoff's Tree Matrix Theorem states that any cofactor of the + Laplacian matrix of a graph is the number of spanning trees in the + graph. (Here we use cofactors for a diagonal entry so that the + cofactor becomes the determinant of the matrix with one row + and its matching column removed.) For a weighted Laplacian matrix, + the cofactor is the sum across all spanning trees of the + multiplicative weight of each tree. That is, the weight of each + tree is the product of its edge weights. The theorem is also + known as Kirchhoff's theorem [1]_ and the Matrix-Tree theorem [2]_. + + For directed graphs, a similar theorem (Tutte's Theorem) holds with + the cofactor chosen to be the one with row and column removed that + correspond to the root. The cofactor is the number of arborescences + with the specified node as root. And the weighted version gives the + sum of the arborescence weights with root `root`. The arborescence + weight is the product of its edge weights. 
+ + Parameters + ---------- + G : NetworkX graph + + root : node + A node in the directed graph `G` that has all nodes as descendants. + (This is ignored for undirected graphs.) + + weight : string or None, optional (default=None) + The name of the edge attribute holding the edge weight. + If `None`, then each edge is assumed to have a weight of 1. + + Returns + ------- + Number + Undirected graphs: + The number of spanning trees of the graph `G`. + Or the sum of all spanning tree weights of the graph `G` + where the weight of a tree is the product of its edge weights. + Directed graphs: + The number of arborescences of `G` rooted at node `root`. + Or the sum of all arborescence weights of the graph `G` with + specified root where the weight of an arborescence is the product + of its edge weights. + + Raises + ------ + NetworkXPointlessConcept + If `G` does not contain any nodes. + + NetworkXError + If the graph `G` is directed and the root node + is not specified or is not in G. + + Examples + -------- + >>> G = nx.complete_graph(5) + >>> round(nx.number_of_spanning_trees(G)) + 125 + + >>> G = nx.Graph() + >>> G.add_edge(1, 2, weight=2) + >>> G.add_edge(1, 3, weight=1) + >>> G.add_edge(2, 3, weight=1) + >>> round(nx.number_of_spanning_trees(G, weight="weight")) + 5 + + Notes + ----- + Self-loops are excluded. Multi-edges are contracted in one edge + equal to the sum of the weights. + + References + ---------- + .. [1] Wikipedia + "Kirchhoff's theorem." + https://en.wikipedia.org/wiki/Kirchhoff%27s_theorem + .. [2] Kirchhoff, G. R. + Über die Auflösung der Gleichungen, auf welche man + bei der Untersuchung der linearen Vertheilung + Galvanischer Ströme geführt wird + Annalen der Physik und Chemie, vol. 72, pp. 497-508, 1847. + .. [3] Margoliash, J. 
+ "Matrix-Tree Theorem for Directed Graphs" + https://www.math.uchicago.edu/~may/VIGRE/VIGRE2010/REUPapers/Margoliash.pdf + """ + import numpy as np + + if len(G) == 0: + raise nx.NetworkXPointlessConcept("Graph G must contain at least one node.") + + # undirected G + if not nx.is_directed(G): + if not nx.is_connected(G): + return 0 + G_laplacian = nx.laplacian_matrix(G, weight=weight).toarray() + return float(np.linalg.det(G_laplacian[1:, 1:])) + + # directed G + if root is None: + raise nx.NetworkXError("Input `root` must be provided when G is directed") + if root not in G: + raise nx.NetworkXError("The node root is not in the graph G.") + if not nx.is_weakly_connected(G): + return 0 + + # Compute directed Laplacian matrix + nodelist = [root] + [n for n in G if n != root] + A = nx.adjacency_matrix(G, nodelist=nodelist, weight=weight) + D = np.diag(A.sum(axis=0)) + G_laplacian = D - A + + # Compute number of spanning trees + return float(np.linalg.det(G_laplacian[1:, 1:])) diff --git a/networkx/algorithms/tree/tests/test_mst.py b/networkx/algorithms/tree/tests/test_mst.py index bc55000a6e6e..a6048d5472af 100644 --- a/networkx/algorithms/tree/tests/test_mst.py +++ b/networkx/algorithms/tree/tests/test_mst.py @@ -730,3 +730,126 @@ def test_random_spanning_tree_single_node_loop(): rst = nx.tree.random_spanning_tree(G) assert len(rst.nodes) == 1 assert len(rst.edges) == 0 + + +class TestNumberSpanningTrees: + @classmethod + def setup_class(cls): + global np + np = pytest.importorskip("numpy") + + def test_nst_disconnected(self): + G = nx.empty_graph(2) + assert np.isclose(nx.number_of_spanning_trees(G), 0) + + def test_nst_no_nodes(self): + G = nx.Graph() + with pytest.raises(nx.NetworkXPointlessConcept): + nx.number_of_spanning_trees(G) + + def test_nst_weight(self): + G = nx.Graph() + G.add_edge(1, 2, weight=1) + G.add_edge(1, 3, weight=1) + G.add_edge(2, 3, weight=2) + # weights are ignored + assert np.isclose(nx.number_of_spanning_trees(G), 3) + # including weight 
+ assert np.isclose(nx.number_of_spanning_trees(G, weight="weight"), 5) + + def test_nst_negative_weight(self): + G = nx.Graph() + G.add_edge(1, 2, weight=1) + G.add_edge(1, 3, weight=-1) + G.add_edge(2, 3, weight=-2) + # weights are ignored + assert np.isclose(nx.number_of_spanning_trees(G), 3) + # including weight + assert np.isclose(nx.number_of_spanning_trees(G, weight="weight"), -1) + + def test_nst_selfloop(self): + # self-loops are ignored + G = nx.complete_graph(3) + G.add_edge(1, 1) + assert np.isclose(nx.number_of_spanning_trees(G), 3) + + def test_nst_multigraph(self): + G = nx.MultiGraph() + G.add_edge(1, 2) + G.add_edge(1, 2) + G.add_edge(1, 3) + G.add_edge(2, 3) + assert np.isclose(nx.number_of_spanning_trees(G), 5) + + def test_nst_complete_graph(self): + # this is known as Cayley's formula + N = 5 + G = nx.complete_graph(N) + assert np.isclose(nx.number_of_spanning_trees(G), N ** (N - 2)) + + def test_nst_path_graph(self): + G = nx.path_graph(5) + assert np.isclose(nx.number_of_spanning_trees(G), 1) + + def test_nst_cycle_graph(self): + G = nx.cycle_graph(5) + assert np.isclose(nx.number_of_spanning_trees(G), 5) + + def test_nst_directed_noroot(self): + G = nx.empty_graph(3, create_using=nx.MultiDiGraph) + with pytest.raises(nx.NetworkXError): + nx.number_of_spanning_trees(G) + + def test_nst_directed_root_not_exist(self): + G = nx.empty_graph(3, create_using=nx.MultiDiGraph) + with pytest.raises(nx.NetworkXError): + nx.number_of_spanning_trees(G, root=42) + + def test_nst_directed_not_weak_connected(self): + G = nx.DiGraph() + G.add_edge(1, 2) + G.add_edge(3, 4) + assert np.isclose(nx.number_of_spanning_trees(G, root=1), 0) + + def test_nst_directed_cycle_graph(self): + G = nx.DiGraph() + G = nx.cycle_graph(7, G) + assert np.isclose(nx.number_of_spanning_trees(G, root=0), 1) + + def test_nst_directed_complete_graph(self): + G = nx.DiGraph() + G = nx.complete_graph(7, G) + assert np.isclose(nx.number_of_spanning_trees(G, root=0), 7**5) + + def 
test_nst_directed_multi(self): + G = nx.MultiDiGraph() + G = nx.cycle_graph(3, G) + G.add_edge(1, 2) + assert np.isclose(nx.number_of_spanning_trees(G, root=0), 2) + + def test_nst_directed_selfloop(self): + G = nx.MultiDiGraph() + G = nx.cycle_graph(3, G) + G.add_edge(1, 1) + assert np.isclose(nx.number_of_spanning_trees(G, root=0), 1) + + def test_nst_directed_weak_connected(self): + G = nx.MultiDiGraph() + G = nx.cycle_graph(3, G) + G.remove_edge(1, 2) + assert np.isclose(nx.number_of_spanning_trees(G, root=0), 0) + + def test_nst_directed_weighted(self): + # from root=1: + # arborescence 1: 1->2, 1->3, weight=2*1 + # arborescence 2: 1->2, 2->3, weight=2*3 + G = nx.DiGraph() + G.add_edge(1, 2, weight=2) + G.add_edge(1, 3, weight=1) + G.add_edge(2, 3, weight=3) + Nst = nx.number_of_spanning_trees(G, root=1, weight="weight") + assert np.isclose(Nst, 8) + Nst = nx.number_of_spanning_trees(G, root=2, weight="weight") + assert np.isclose(Nst, 0) + Nst = nx.number_of_spanning_trees(G, root=3, weight="weight") + assert np.isclose(Nst, 0) diff --git a/networkx/conftest.py b/networkx/conftest.py index a6b63ef350e5..2f5f6cfdaf75 100644 --- a/networkx/conftest.py +++ b/networkx/conftest.py @@ -148,6 +148,9 @@ def set_warnings(): warnings.filterwarnings( "ignore", category=DeprecationWarning, message="\n\nk_corona" ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="\n\ntotal_spanning_tree_weight" + ) warnings.filterwarnings( "ignore", category=DeprecationWarning, message=r"\n\nThe 'create=matrix'" ) @@ -220,6 +223,7 @@ def add_nx(doctest_namespace): "algorithms/node_classification.py", "algorithms/non_randomness.py", "algorithms/shortest_paths/dense.py", + "algorithms/tree/mst.py", "generators/expanders.py", "linalg/bethehessianmatrix.py", "linalg/laplacianmatrix.py", diff --git a/networkx/linalg/laplacianmatrix.py b/networkx/linalg/laplacianmatrix.py index bf2775a4f014..1df4b422b76b 100644 --- a/networkx/linalg/laplacianmatrix.py +++ 
b/networkx/linalg/laplacianmatrix.py @@ -1,7 +1,7 @@ """Laplacian matrix of graphs. -All calculations here are done using the out-degree. For Laplacians -using in-degree, us `G.reverse(copy=False)` instead of `G`. +All calculations here are done using the out-degree. For Laplacians using +in-degree, use `G.reverse(copy=False)` instead of `G` and take the transpose. The `laplacian_matrix` function provides an unnormalized matrix, while `normalized_laplacian_matrix`, `directed_laplacian_matrix`, @@ -53,7 +53,8 @@ def laplacian_matrix(G, nodelist=None, weight="weight"): or `directed_combinatorial_laplacian_matrix`. This calculation uses the out-degree of the graph `G`. To use the - in-degree for calculations instead, use `G.reverse(copy=False)` instead. + in-degree for calculations instead, use `G.reverse(copy=False)` and + take the transpose. See Also -------- @@ -90,11 +91,26 @@ def laplacian_matrix(G, nodelist=None, weight="weight"): [-1 2 -1 0] [ 0 0 1 -1] [ 0 0 -1 1]] - >>> G = nx.Graph(DiG) - >>> print(nx.laplacian_matrix(G).toarray()) + + Notice that node 4 is represented by the third column and row. This is because + by default the row/column order is the order of `G.nodes` (i.e. the node added + order -- in the edgelist, 4 first appears in (2, 4), before node 3 in edge (4, 3).) + To control the node order of the matrix, use the `nodelist` argument. + + >>> print(nx.laplacian_matrix(DiG, nodelist=[1, 2, 3, 4]).toarray()) [[ 1 -1 0 0] - [-1 2 -1 0] - [ 0 -1 2 -1] + [-1 2 0 -1] + [ 0 0 1 -1] + [ 0 0 -1 1]] + + This calculation uses the out-degree of the graph `G`. To use the + in-degree for calculations instead, use `G.reverse(copy=False)` and + take the transpose. + + >>> print(nx.laplacian_matrix(DiG.reverse(copy=False)).toarray().T) + [[ 1 -1 0 0] + [-1 1 -1 0] + [ 0 0 2 -1] [ 0 0 -1 1]] References @@ -154,7 +170,8 @@ def normalized_laplacian_matrix(G, nodelist=None, weight="weight"): the adjacency matrix [2]_. 
This calculation uses the out-degree of the graph `G`. To use the - in-degree for calculations instead, use `G.reverse(copy=False)` instead. + in-degree for calculations instead, use `G.reverse(copy=False)` and + take the transpose. For an unnormalized output, use `laplacian_matrix`. @@ -176,7 +193,18 @@ def normalized_laplacian_matrix(G, nodelist=None, weight="weight"): [-0.7071 1. -0.7071 0. ] [ 0. 0. 1. -1. ] [ 0. 0. -1. 1. ]] - >>> G = nx.Graph(DiG) + + Notice that node 4 is represented by the third column and row. This is because + by default the row/column order is the order of `G.nodes` (i.e. the node added + order -- in the edgelist, 4 first appears in (2, 4), before node 3 in edge (4, 3).) + To control the node order of the matrix, use the `nodelist` argument. + + >>> print(nx.normalized_laplacian_matrix(DiG, nodelist=[1, 2, 3, 4]).toarray()) + [[ 1. -0.7071 0. 0. ] + [-0.7071 1. 0. -0.7071] + [ 0. 0. 1. -1. ] + [ 0. 0. -1. 1. ]] + >>> G = nx.Graph(edges) >>> print(nx.normalized_laplacian_matrix(G).toarray()) [[ 1. -0.7071 0. 0. ] [-0.7071 1. -0.5 0. 
] @@ -206,50 +234,113 @@ def normalized_laplacian_matrix(G, nodelist=None, weight="weight"): if nodelist is None: nodelist = list(G) A = nx.to_scipy_sparse_array(G, nodelist=nodelist, weight=weight, format="csr") - n, m = A.shape + n, _ = A.shape diags = A.sum(axis=1) # TODO: rm csr_array wrapper when spdiags can produce arrays - D = sp.sparse.csr_array(sp.sparse.spdiags(diags, 0, m, n, format="csr")) + D = sp.sparse.csr_array(sp.sparse.spdiags(diags, 0, n, n, format="csr")) L = D - A with np.errstate(divide="ignore"): diags_sqrt = 1.0 / np.sqrt(diags) diags_sqrt[np.isinf(diags_sqrt)] = 0 # TODO: rm csr_array wrapper when spdiags can produce arrays - DH = sp.sparse.csr_array(sp.sparse.spdiags(diags_sqrt, 0, m, n, format="csr")) + DH = sp.sparse.csr_array(sp.sparse.spdiags(diags_sqrt, 0, n, n, format="csr")) return DH @ (L @ DH) @nx._dispatchable(edge_attrs="weight") -def total_spanning_tree_weight(G, weight=None): +def total_spanning_tree_weight(G, weight=None, root=None): """ Returns the total weight of all spanning trees of `G`. - Kirchoff's Tree Matrix Theorem states that the determinant of any cofactor of the - Laplacian matrix of a graph is the number of spanning trees in the graph. For a - weighted Laplacian matrix, it is the sum across all spanning trees of the - multiplicative weight of each tree. That is, the weight of each tree is the - product of its edge weights. + Kirchoff's Tree Matrix Theorem [1]_, [2]_ states that the determinant of any + cofactor of the Laplacian matrix of a graph is the number of spanning trees + in the graph. For a weighted Laplacian matrix, it is the sum across all + spanning trees of the multiplicative weight of each tree. That is, the + weight of each tree is the product of its edge weights. + + For unweighted graphs, the total weight equals the number of spanning trees in `G`. + + For directed graphs, the total weight follows by summing over all directed + spanning trees in `G` that start in the `root` node [3]_. + + .. 
deprecated:: 3.3 + + ``total_spanning_tree_weight`` is deprecated and will be removed in v3.5. + Use ``nx.number_of_spanning_trees(G)`` instead. Parameters ---------- G : NetworkX Graph - The graph to use Kirchhoff's theorem on. - weight : string or None - The key for the edge attribute holding the edge weight. If `None`, then - each edge is assumed to have a weight of 1 and this function returns the - total number of spanning trees in `G`. + weight : string or None, optional (default=None) + The key for the edge attribute holding the edge weight. + If None, then each edge has weight 1. + + root : node (only required for directed graphs) + A node in the directed graph `G`. Returns ------- - float - The sum of the total multiplicative weights for all spanning trees in `G` + total_weight : float + Undirected graphs: + The sum of the total multiplicative weights for all spanning trees in `G`. + Directed graphs: + The sum of the total multiplicative weights for all spanning trees of `G`, + rooted at node `root`. + + Raises + ------ + NetworkXPointlessConcept + If `G` does not contain any nodes. + + NetworkXError + If the graph `G` is not (weakly) connected, + or if `G` is directed and the root node is not specified or not in G. + + Examples + -------- + >>> G = nx.complete_graph(5) + >>> round(nx.total_spanning_tree_weight(G)) + 125 + + >>> G = nx.Graph() + >>> G.add_edge(1, 2, weight=2) + >>> G.add_edge(1, 3, weight=1) + >>> G.add_edge(2, 3, weight=1) + >>> round(nx.total_spanning_tree_weight(G, "weight")) + 5 + + Notes + ----- + Self-loops are excluded. Multi-edges are contracted in one edge + equal to the sum of the weights. + + References + ---------- + .. [1] Wikipedia + "Kirchhoff's theorem." + https://en.wikipedia.org/wiki/Kirchhoff%27s_theorem + .. [2] Kirchhoff, G. R. + Über die Auflösung der Gleichungen, auf welche man + bei der Untersuchung der linearen Vertheilung + Galvanischer Ströme geführt wird + Annalen der Physik und Chemie, vol. 72, pp. 
497-508, 1847. + .. [3] Margoliash, J. + "Matrix-Tree Theorem for Directed Graphs" + https://www.math.uchicago.edu/~may/VIGRE/VIGRE2010/REUPapers/Margoliash.pdf """ - import numpy as np + import warnings + + warnings.warn( + ( + "\n\ntotal_spanning_tree_weight is deprecated and will be removed in v3.5.\n" + "Use `nx.number_of_spanning_trees(G)` instead." + ), + category=DeprecationWarning, + stacklevel=3, + ) - G_laplacian = nx.laplacian_matrix(G, weight=weight).toarray() - # Determinant ignoring first row and column - return float(abs(np.linalg.det(G_laplacian[1:, 1:]))) + return nx.number_of_spanning_trees(G, weight=weight, root=root) ############################################################################### @@ -313,7 +404,8 @@ def directed_laplacian_matrix( The result is always a symmetric matrix. This calculation uses the out-degree of the graph `G`. To use the - in-degree for calculations instead, use `G.reverse(copy=False)` instead. + in-degree for calculations instead, use `G.reverse(copy=False)` and + take the transpose. See Also -------- @@ -411,7 +503,8 @@ def directed_combinatorial_laplacian_matrix( The result is always a symmetric matrix. This calculation uses the out-degree of the graph `G`. To use the - in-degree for calculations instead, use `G.reverse(copy=False)` instead. + in-degree for calculations instead, use `G.reverse(copy=False)` and + take the transpose. 
See Also -------- From df96f4709b3b20f3e76aac4e30763882c5c4ed84 Mon Sep 17 00:00:00 2001 From: Vanshika Mishra <74042272+vanshika230@users.noreply.github.com> Date: Mon, 4 Mar 2024 06:11:33 +0530 Subject: [PATCH 17/47] Improve test coverage for bipartite matrix.py (#7312) * improve test coverage for matrix.py * Update networkx/algorithms/bipartite/tests/test_matrix.py Co-authored-by: Ross Barnowski --------- Co-authored-by: Ross Barnowski --- networkx/algorithms/bipartite/tests/test_matrix.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/networkx/algorithms/bipartite/tests/test_matrix.py b/networkx/algorithms/bipartite/tests/test_matrix.py index 393b71e7ca29..53d83115118e 100644 --- a/networkx/algorithms/bipartite/tests/test_matrix.py +++ b/networkx/algorithms/bipartite/tests/test_matrix.py @@ -39,6 +39,11 @@ def test_biadjacency_matrix_order(self): M = bipartite.biadjacency_matrix(G, X, Y, weight="weight") assert M[1, 2] == 2 + def test_biadjacency_matrix_empty_graph(self): + G = nx.empty_graph(2) + M = nx.bipartite.biadjacency_matrix(G, [0]) + assert np.array_equal(M.toarray(), np.array([[0]])) + def test_null_graph(self): with pytest.raises(nx.NetworkXError): bipartite.biadjacency_matrix(nx.Graph(), []) From 8fe7f49b1ed9f6a7b31497c4d0888b4f261eed93 Mon Sep 17 00:00:00 2001 From: Ross Barnowski Date: Sun, 3 Mar 2024 16:44:38 -0800 Subject: [PATCH 18/47] Try/except intermittently failing basemaps in geospatial examples (#7324) * Temporarily make poinst geospatial example non-executable. Revert this when contextily basemap issue is improved. * Revert "Temporarily rm geospatial examples to fix CI. (#7299)" This reverts commit fce5d7d58d1a50ccc94419e888be3ff4fd8b61aa. * Revert "Temporarily make poinst geospatial example non-executable." This reverts commit 02927e3d3d3f49dc72e6c8c74fe2978a266f5c31. * Try/except add_basemap in geospatial examples. 
--- examples/geospatial/{delaunay.py => plot_delaunay.py} | 6 +++++- examples/geospatial/{lines.py => plot_lines.py} | 10 ++++++++-- examples/geospatial/plot_points.py | 5 ++++- 3 files changed, 17 insertions(+), 4 deletions(-) rename examples/geospatial/{delaunay.py => plot_delaunay.py} (96%) rename examples/geospatial/{lines.py => plot_lines.py} (94%) diff --git a/examples/geospatial/delaunay.py b/examples/geospatial/plot_delaunay.py similarity index 96% rename from examples/geospatial/delaunay.py rename to examples/geospatial/plot_delaunay.py index edafe0635e16..799381fff745 100644 --- a/examples/geospatial/delaunay.py +++ b/examples/geospatial/plot_delaunay.py @@ -58,7 +58,11 @@ # Now, we can plot with a nice basemap. ax = cells.plot(facecolor="lightblue", alpha=0.50, edgecolor="cornsilk", linewidth=2) -add_basemap(ax) +try: # Try-except for issues with timeout/parsing failures in CI + add_basemap(ax) +except: + pass + ax.axis("off") nx.draw( delaunay_graph, diff --git a/examples/geospatial/lines.py b/examples/geospatial/plot_lines.py similarity index 94% rename from examples/geospatial/lines.py rename to examples/geospatial/plot_lines.py index 616db374369c..3de10223b3f3 100644 --- a/examples/geospatial/lines.py +++ b/examples/geospatial/plot_lines.py @@ -76,7 +76,10 @@ for i, facet in enumerate(ax): facet.set_title(("Streets", "Graph")[i]) facet.axis("off") - add_basemap(facet) + try: # For issues with downloading/parsing in CI + add_basemap(facet) + except: + pass nx.draw( G_primal, {n: [n[0], n[1]] for n in list(G_primal.nodes)}, ax=ax[1], node_size=50 ) @@ -92,7 +95,10 @@ for i, facet in enumerate(ax): facet.set_title(("Streets", "Graph")[i]) facet.axis("off") - add_basemap(facet) + try: # For issues with downloading/parsing in CI + add_basemap(facet) + except: + pass nx.draw(G_dual, {n: [n[0], n[1]] for n in list(G_dual.nodes)}, ax=ax[1], node_size=50) plt.show() diff --git a/examples/geospatial/plot_points.py b/examples/geospatial/plot_points.py index 
7d9d99df8264..7517069cafa5 100644 --- a/examples/geospatial/plot_points.py +++ b/examples/geospatial/plot_points.py @@ -51,7 +51,10 @@ f, ax = plt.subplots(1, 2, figsize=(8, 4)) for i, facet in enumerate(ax): cases.plot(marker=".", color="orangered", ax=facet) - add_basemap(facet) + try: # For issues with downloading/parsing basemaps in CI + add_basemap(facet) + except: + pass facet.set_title(("KNN-3", "50-meter Distance Band")[i]) facet.axis("off") nx.draw(knn_graph, positions, ax=ax[0], node_size=5, node_color="b") From 7e97d4c040a39cc4cbe22b2ab78c10f028610bb9 Mon Sep 17 00:00:00 2001 From: BucketHeadP65 <104688161+BucketHeadP65@users.noreply.github.com> Date: Mon, 4 Mar 2024 05:46:41 +0200 Subject: [PATCH 19/47] Update __init__.py (#7320) * Update __init__.py * Update __init__.py corrected import order --- networkx/generators/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/networkx/generators/__init__.py b/networkx/generators/__init__.py index 7a1adc42a7bb..324d82b7e32f 100644 --- a/networkx/generators/__init__.py +++ b/networkx/generators/__init__.py @@ -12,6 +12,7 @@ from networkx.generators.ego import * from networkx.generators.expanders import * from networkx.generators.geometric import * +from networkx.generators.harary_graph import * from networkx.generators.internet_as_graphs import * from networkx.generators.intersection import * from networkx.generators.interval_graph import * From 41fd8df916c65c60f7403031a583f0ea5d7a8eec Mon Sep 17 00:00:00 2001 From: Aaron Z <40212329+aaronzo@users.noreply.github.com> Date: Tue, 5 Mar 2024 05:23:31 +0000 Subject: [PATCH 20/47] add seed to `nx.generate_random_paths` (#7332) add seed to generate_random_paths --- networkx/algorithms/similarity.py | 15 ++++++++++++--- networkx/algorithms/tests/test_similarity.py | 4 +--- 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/networkx/algorithms/similarity.py b/networkx/algorithms/similarity.py index 765849984eae..24f303b13c99 100644 --- 
a/networkx/algorithms/similarity.py +++ b/networkx/algorithms/similarity.py @@ -20,6 +20,7 @@ from itertools import product import networkx as nx +from networkx.utils import np_random_state __all__ = [ "graph_edit_distance", @@ -1662,9 +1663,10 @@ def panther_similarity( return top_k_with_val +@np_random_state(5) @nx._dispatchable(edge_attrs="weight") def generate_random_paths( - G, sample_size, path_length=5, index_map=None, weight="weight" + G, sample_size, path_length=5, index_map=None, weight="weight", seed=None ): """Randomly generate `sample_size` paths of length `path_length`. @@ -1685,6 +1687,9 @@ def generate_random_paths( weight : string or None, optional (default="weight") The name of an edge attribute that holds the numerical value used as a weight. If None then each edge has weight 1. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Returns ------- @@ -1718,6 +1723,10 @@ def generate_random_paths( """ import numpy as np + randint_fn = ( + seed.integers if isinstance(seed, np.random.Generator) else seed.randint + ) + # Calculate transition probabilities between # every pair of vertices according to Eq. 
(3) adj_mat = nx.to_numpy_array(G, weight=weight) @@ -1729,7 +1738,7 @@ def generate_random_paths( for path_index in range(sample_size): # Sample current vertex v = v_i uniformly at random - node_index = np.random.randint(0, high=num_nodes) + node_index = randint_fn(num_nodes) node = node_map[node_index] # Add v into p_r and add p_r into the path set @@ -1747,7 +1756,7 @@ def generate_random_paths( for _ in range(path_length): # Randomly sample a neighbor (v_j) according # to transition probabilities from ``node`` (v) to its neighbors - nbr_index = np.random.choice( + nbr_index = seed.choice( num_nodes, p=transition_probabilities[starting_index] ) diff --git a/networkx/algorithms/tests/test_similarity.py b/networkx/algorithms/tests/test_similarity.py index 420c709c1739..3836ccfe182f 100644 --- a/networkx/algorithms/tests/test_similarity.py +++ b/networkx/algorithms/tests/test_similarity.py @@ -845,8 +845,6 @@ def test_panther_similarity_isolated(self): nx.panther_similarity(G, source=1) def test_generate_random_paths_unweighted(self): - np.random.seed(42) - index_map = {} num_paths = 10 path_length = 2 @@ -857,7 +855,7 @@ def test_generate_random_paths_unweighted(self): G.add_edge(1, 2) G.add_edge(2, 4) paths = nx.generate_random_paths( - G, num_paths, path_length=path_length, index_map=index_map + G, num_paths, path_length=path_length, index_map=index_map, seed=42 ) expected_paths = [ [3, 0, 3], From 1f8d279e230e9930a27fe27466ddd07a017a597e Mon Sep 17 00:00:00 2001 From: Erik Welch Date: Mon, 4 Mar 2024 23:45:44 -0600 Subject: [PATCH 21/47] Allow backends to implement `should_run` (#7257) * Allow backends to implement `should_run` * Handle str return types for `can_run` and `should_run` --- networkx/utils/backends.py | 45 ++++++++++++++++++++++++++++++++++---- 1 file changed, 41 insertions(+), 4 deletions(-) diff --git a/networkx/utils/backends.py b/networkx/utils/backends.py index 1533dc554358..6c1ab0498b14 100644 --- a/networkx/utils/backends.py +++ 
b/networkx/utils/backends.py @@ -78,6 +78,17 @@ class WrappedSparse: If a backend only partially implements some algorithms, it can define a ``can_run(name, args, kwargs)`` function that returns True or False indicating whether it can run the algorithm with the given arguments. +It may also return a string indicating why the algorithm can't be run; +this string may be used in the future to give helpful info to the user. + +A backend may also define ``should_run(name, args, kwargs)`` that is similar +to ``can_run``, but answers whether the backend *should* be run (converting +if necessary). Like ``can_run``, it receives the original arguments so it +can decide whether it should be run by inspecting the arguments. ``can_run`` +runs before ``should_run``, so ``should_run`` may assume ``can_run`` is True. + +If not implemented by the backend, ``can_run`` and ``should_run`` are +assumed to always return True if the backend implements the algorithm. A special ``on_start_tests(items)`` function may be defined by the backend. It will be called with the list of NetworkX tests discovered. Each item @@ -135,10 +146,18 @@ def _get_backends(group, *, load_and_call=False): _loaded_backends = {} # type: ignore[var-annotated] +def _always_run(name, args, kwargs): + return True + + def _load_backend(backend_name): if backend_name in _loaded_backends: return _loaded_backends[backend_name] rv = _loaded_backends[backend_name] = backends[backend_name].load() + if not hasattr(rv, "can_run"): + rv.can_run = _always_run + if not hasattr(rv, "should_run"): + rv.should_run = _always_run return rv @@ -579,6 +598,7 @@ def __call__(self, /, *args, backend=None, **kwargs): # Only networkx graphs; try to convert and run with a backend with automatic # conversion, but don't do this by default for graph generators or loaders, # or if the functions mutates an input graph or returns a graph. + # Only convert and run if `backend.should_run(...)` returns True. 
if ( not self._returns_graph and ( @@ -603,7 +623,7 @@ def __call__(self, /, *args, backend=None, **kwargs): ): # Should we warn or log if we don't convert b/c the input will be mutated? for backend_name in self._automatic_backends: - if self._can_backend_run(backend_name, *args, **kwargs): + if self._should_backend_run(backend_name, *args, **kwargs): return self._convert_and_call( backend_name, args, @@ -614,10 +634,27 @@ def __call__(self, /, *args, backend=None, **kwargs): return self.orig_func(*args, **kwargs) def _can_backend_run(self, backend_name, /, *args, **kwargs): - """Can the specified backend run this algorithms with these arguments?""" + """Can the specified backend run this algorithm with these arguments?""" + backend = _load_backend(backend_name) + # `backend.can_run` and `backend.should_run` may return strings that describe + # why they can't or shouldn't be run. We plan to use the strings in the future. + return ( + hasattr(backend, self.name) + and (can_run := backend.can_run(self.name, args, kwargs)) + and not isinstance(can_run, str) + ) + + def _should_backend_run(self, backend_name, /, *args, **kwargs): + """Can/should the specified backend run this algorithm with these arguments?""" backend = _load_backend(backend_name) - return hasattr(backend, self.name) and ( - not hasattr(backend, "can_run") or backend.can_run(self.name, args, kwargs) + # `backend.can_run` and `backend.should_run` may return strings that describe + # why they can't or shouldn't be run. We plan to use the strings in the future. 
+ return ( + hasattr(backend, self.name) + and (can_run := backend.can_run(self.name, args, kwargs)) + and not isinstance(can_run, str) + and (should_run := backend.should_run(self.name, args, kwargs)) + and not isinstance(should_run, str) ) def _convert_arguments(self, backend_name, args, kwargs): From 15df9159e8d952e114d7c1dfa002ff29b9d5b9ac Mon Sep 17 00:00:00 2001 From: Ross Barnowski Date: Mon, 4 Mar 2024 22:29:32 -0800 Subject: [PATCH 22/47] Un-dispatch coloring strategies. (#7329) --- networkx/algorithms/coloring/greedy_coloring.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/networkx/algorithms/coloring/greedy_coloring.py b/networkx/algorithms/coloring/greedy_coloring.py index 661f14e0149c..61bc953673f5 100644 --- a/networkx/algorithms/coloring/greedy_coloring.py +++ b/networkx/algorithms/coloring/greedy_coloring.py @@ -20,7 +20,6 @@ ] -@nx._dispatchable def strategy_largest_first(G, colors): """Returns a list of the nodes of ``G`` in decreasing order by degree. @@ -32,7 +31,6 @@ def strategy_largest_first(G, colors): @py_random_state(2) -@nx._dispatchable def strategy_random_sequential(G, colors, seed=None): """Returns a random permutation of the nodes of ``G`` as a list. @@ -47,7 +45,6 @@ def strategy_random_sequential(G, colors, seed=None): return nodes -@nx._dispatchable def strategy_smallest_last(G, colors): """Returns a deque of the nodes of ``G``, "smallest" last. @@ -121,7 +118,6 @@ def _maximal_independent_set(G): return result -@nx._dispatchable def strategy_independent_set(G, colors): """Uses a greedy independent set removal strategy to determine the colors. @@ -146,7 +142,6 @@ def strategy_independent_set(G, colors): yield from nodes -@nx._dispatchable def strategy_connected_sequential_bfs(G, colors): """Returns an iterable over nodes in ``G`` in the order given by a breadth-first traversal. 
@@ -160,7 +155,6 @@ def strategy_connected_sequential_bfs(G, colors): return strategy_connected_sequential(G, colors, "bfs") -@nx._dispatchable def strategy_connected_sequential_dfs(G, colors): """Returns an iterable over nodes in ``G`` in the order given by a depth-first traversal. @@ -174,7 +168,6 @@ def strategy_connected_sequential_dfs(G, colors): return strategy_connected_sequential(G, colors, "dfs") -@nx._dispatchable def strategy_connected_sequential(G, colors, traversal="bfs"): """Returns an iterable over nodes in ``G`` in the order given by a breadth-first or depth-first traversal. @@ -207,7 +200,6 @@ def strategy_connected_sequential(G, colors, traversal="bfs"): yield end -@nx._dispatchable def strategy_saturation_largest_first(G, colors): """Iterates over all the nodes of ``G`` in "saturation order" (also known as "DSATUR"). From e005b692785e8cfa6dbf4458aa46bc4ab757aa5c Mon Sep 17 00:00:00 2001 From: Ross Barnowski Date: Mon, 4 Mar 2024 22:30:36 -0800 Subject: [PATCH 23/47] Undo change in return type of `single_target_shortest_path_length` (#7327) Fixup tests. 
--- networkx/algorithms/shortest_paths/tests/test_unweighted.py | 4 ++-- networkx/algorithms/shortest_paths/unweighted.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/networkx/algorithms/shortest_paths/tests/test_unweighted.py b/networkx/algorithms/shortest_paths/tests/test_unweighted.py index ec0b3f757ed4..e2d999518a55 100644 --- a/networkx/algorithms/shortest_paths/tests/test_unweighted.py +++ b/networkx/algorithms/shortest_paths/tests/test_unweighted.py @@ -92,9 +92,9 @@ def test_single_target_shortest_path(self): def test_single_target_shortest_path_length(self): pl = nx.single_target_shortest_path_length lengths = {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} - assert pl(self.cycle, 0) == lengths + assert dict(pl(self.cycle, 0)) == lengths lengths = {0: 0, 1: 6, 2: 5, 3: 4, 4: 3, 5: 2, 6: 1} - assert pl(self.directed_cycle, 0) == lengths + assert dict(pl(self.directed_cycle, 0)) == lengths # test missing targets target = 8 with pytest.raises(nx.NodeNotFound, match=f"Target {target} is not in G"): diff --git a/networkx/algorithms/shortest_paths/unweighted.py b/networkx/algorithms/shortest_paths/unweighted.py index c7eea6fc6254..bb587bba991b 100644 --- a/networkx/algorithms/shortest_paths/unweighted.py +++ b/networkx/algorithms/shortest_paths/unweighted.py @@ -117,7 +117,7 @@ def single_target_shortest_path_length(G, target, cutoff=None): Examples -------- >>> G = nx.path_graph(5, create_using=nx.DiGraph()) - >>> length = nx.single_target_shortest_path_length(G, 4) + >>> length = dict(nx.single_target_shortest_path_length(G, 4)) >>> length[0] 4 >>> for node in range(5): @@ -151,7 +151,7 @@ def single_target_shortest_path_length(G, target, cutoff=None): nextlevel = [target] # for version 3.3 we will return a dict like this: # return dict(_single_shortest_path_length(adj, nextlevel, cutoff)) - return dict(_single_shortest_path_length(adj, nextlevel, cutoff)) + return _single_shortest_path_length(adj, nextlevel, cutoff) @nx._dispatchable From 
19e6bc740319bcd428334d555525890a4c88ec73 Mon Sep 17 00:00:00 2001 From: Ross Barnowski Date: Mon, 4 Mar 2024 22:35:03 -0800 Subject: [PATCH 24/47] Update docstring example with future-proof pandas assignment. (#7323) * Update docstring example with future-proof pandas assignment. * Rm unnecessary import. Co-authored-by: Dan Schult --------- Co-authored-by: Dan Schult --- networkx/convert_matrix.py | 29 +++++++++++++++-------------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/networkx/convert_matrix.py b/networkx/convert_matrix.py index 2fc1e784f5b4..6165ac18e31e 100644 --- a/networkx/convert_matrix.py +++ b/networkx/convert_matrix.py @@ -101,20 +101,21 @@ def to_pandas_adjacency( diagonal matrix entry value to the weight attribute of the edge (or the number 1 if the edge has no weight attribute). If the alternate convention of doubling the edge weight is desired the - resulting Pandas DataFrame can be modified as follows: - - >>> import pandas as pd - >>> pd.options.display.max_columns = 20 - >>> import numpy as np - >>> G = nx.Graph([(1, 1)]) - >>> df = nx.to_pandas_adjacency(G, dtype=int) - >>> df - 1 - 1 1 - >>> df.values[np.diag_indices_from(df)] *= 2 - >>> df - 1 - 1 2 + resulting Pandas DataFrame can be modified as follows:: + + >>> import pandas as pd + >>> G = nx.Graph([(1, 1), (2, 2)]) + >>> df = nx.to_pandas_adjacency(G) + >>> df + 1 2 + 1 1.0 0.0 + 2 0.0 1.0 + >>> diag_idx = list(range(len(df))) + >>> df.iloc[diag_idx, diag_idx] *= 2 + >>> df + 1 2 + 1 2.0 0.0 + 2 0.0 2.0 Examples -------- From 91337d52979b7673651267cd8e6c6d44c489b5df Mon Sep 17 00:00:00 2001 From: Ross Barnowski Date: Wed, 6 Mar 2024 11:37:33 -0800 Subject: [PATCH 25/47] Remove animation from spectral clustering example to improve performance (#7328) * Rm 3d animation from spectral clustering example. * Add xref to 3D examples to illustrate how to animate 3D viz. * Vastly improves (>100x) example runtime. * Add sphinx-gallery config to allow xrefs. 
--- doc/conf.py | 1 + ...ge_segmentation_spectral_graph_partiion.py | 74 +++---------------- 2 files changed, 10 insertions(+), 65 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index e2eb2a547c97..cdd04b9fe523 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -54,6 +54,7 @@ "image_scrapers": ("matplotlib",), "matplotlib_animations": True, "plot_gallery": "True", + "reference_url": {"sphinx_gallery": None}, } # Add pygraphviz png scraper, if available try: diff --git a/examples/algorithms/plot_image_segmentation_spectral_graph_partiion.py b/examples/algorithms/plot_image_segmentation_spectral_graph_partiion.py index f2787a2b4c8c..197b86d1496d 100644 --- a/examples/algorithms/plot_image_segmentation_spectral_graph_partiion.py +++ b/examples/algorithms/plot_image_segmentation_spectral_graph_partiion.py @@ -1,12 +1,16 @@ """ -===================================================== +================================================== Image Segmentation via Spectral Graph Partitioning -===================================================== -Example of partitioning a undirected graph obtained by `k-neighbors` +================================================== + +Example of partitioning a undirected graph obtained by ``k-neighbors`` from an RGB image into two subgraphs using spectral clustering illustrated by 3D plots of the original labeled data points in RGB 3D space vs the bi-partition marking performed by graph partitioning via spectral clustering. -All 3D plots and animations use the 3D spectral layout. +All 3D plots use the 3D spectral layout. + +See :ref:`sphx_glr_auto_examples_3d_drawing` for recipes to create 3D animations +from these visualizations. 
""" import numpy as np import networkx as nx @@ -15,7 +19,7 @@ from matplotlib.lines import Line2D from sklearn.cluster import SpectralClustering -# sphinx_gallery_thumbnail_number = 4 +# sphinx_gallery_thumbnail_number = 3 ############################################################################### # Create an example 3D dataset "The Rings". @@ -129,40 +133,6 @@ def _scatter_plot(ax, X, array_of_markers, axis_plot=True): plt.show() -############################################################################### -# Generate the rotating animation of the clustered data. -# ------------------------------------------------------ -# The data points are marked according to clustering and rotated -# in the 3D animation. - - -def _init(): - ax.clear() - _scatter_plot(ax, X, array_of_markers) - ax.grid(False) - ax.set_axis_off() - ax.view_init(elev=6.0, azim=-22.0) - - -def _frame_update(index): - ax.view_init(6.0 + index * 0.2, -22.0 + index * 0.5) - - -fig = plt.figure(layout="tight") -ax = fig.add_subplot(111, projection="3d") -ax.grid(False) -ax.set_axis_off() -ani = animation.FuncAnimation( - fig, - _frame_update, - init_func=_init, - interval=50, - cache_frame_data=False, - frames=100, -) - -plt.show() - ############################################################################### # Generate the plots of the graph. @@ -217,29 +187,3 @@ def _3d_graph_plot(ax): _3d_graph_plot(ax1) plt.tight_layout() plt.show() - -############################################################################### -# Generate the rotating 3D animation of the graph. -# ------------------------------------------------ -# The nodes of the graph are marked according to clustering. -# The graph is rotated in the 3D animation. 
- - -def _frame_update(index): - ax.view_init(100.0 + index * 0.7, -100.0 + index * 0.5) - - -fig = plt.figure(layout="tight") -ax = fig.add_subplot(111, projection="3d") -ax.grid(False) -ax.set_axis_off() -_3d_graph_plot(ax) -ani = animation.FuncAnimation( - fig, - _frame_update, - interval=50, - cache_frame_data=False, - frames=100, -) - -plt.show() From 9e72994ccd96770cbde0ac548eb2db86adc6394b Mon Sep 17 00:00:00 2001 From: Matt Schwennesen Date: Thu, 7 Mar 2024 04:47:53 +0000 Subject: [PATCH 26/47] Add new test result to `test_asadpour_tsp` and change `linprog` method (#7335) fix linprog method, add new test result --- .../approximation/tests/test_traveling_salesman.py | 12 +++++++++--- .../algorithms/approximation/traveling_salesman.py | 4 +++- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/networkx/algorithms/approximation/tests/test_traveling_salesman.py b/networkx/algorithms/approximation/tests/test_traveling_salesman.py index ccb553e1cc71..539b243549a9 100644 --- a/networkx/algorithms/approximation/tests/test_traveling_salesman.py +++ b/networkx/algorithms/approximation/tests/test_traveling_salesman.py @@ -756,9 +756,15 @@ def fixed_asadpour(G, weight): # the shortest path between those vertices, allowing vertices to appear more # than once. # - # However, we are using a fixed random number generator so we know what the - # expected tour is. - expected_tours = [[1, 4, 5, 0, 2, 3, 2, 1], [3, 2, 0, 1, 4, 5, 3]] + # Even though we are using a fixed seed, multiple tours have been known to + # be returned. The first two are from the original delevopment of this test, + # and the third one from issue #5913 on GitHub. If other tours are returned, + # add it on the list of expected tours. 
+ expected_tours = [ + [1, 4, 5, 0, 2, 3, 2, 1], + [3, 2, 0, 1, 4, 5, 3], + [3, 2, 1, 0, 5, 4, 3], + ] assert tour in expected_tours diff --git a/networkx/algorithms/approximation/traveling_salesman.py b/networkx/algorithms/approximation/traveling_salesman.py index f3fae97e8ef0..7501daf41f57 100644 --- a/networkx/algorithms/approximation/traveling_salesman.py +++ b/networkx/algorithms/approximation/traveling_salesman.py @@ -683,7 +683,9 @@ def direction_of_ascent(): a_eq[n_count][arb_count] = deg - 2 n_count -= 1 a_eq[len(G)][arb_count] = 1 - program_result = optimize.linprog(c, A_eq=a_eq, b_eq=b_eq) + program_result = optimize.linprog( + c, A_eq=a_eq, b_eq=b_eq, method="highs-ipm" + ) # If the constants exist, then the direction of ascent doesn't if program_result.success: # There is no direction of ascent From d71ce117bd4d13c3aa15425b732eb59029f46737 Mon Sep 17 00:00:00 2001 From: William Zijie Zhang <89562186+Transurgeon@users.noreply.github.com> Date: Thu, 7 Mar 2024 02:05:26 -0500 Subject: [PATCH 27/47] Adding tree broadcasting algorithm in a new module. (#6928) New functions to compute tree broadcast time for undirected graphs. Co-authored-by: Transurgeon Co-authored-by: Dan Schult Co-authored-by: Ross Barnowski --- doc/reference/algorithms/broadcasting.rst | 10 ++ doc/reference/algorithms/index.rst | 1 + networkx/algorithms/__init__.py | 1 + networkx/algorithms/broadcasting.py | 153 ++++++++++++++++++ .../algorithms/tests/test_broadcasting.py | 81 ++++++++++ 5 files changed, 246 insertions(+) create mode 100644 doc/reference/algorithms/broadcasting.rst create mode 100644 networkx/algorithms/broadcasting.py create mode 100644 networkx/algorithms/tests/test_broadcasting.py diff --git a/doc/reference/algorithms/broadcasting.rst b/doc/reference/algorithms/broadcasting.rst new file mode 100644 index 000000000000..3c2164982ff0 --- /dev/null +++ b/doc/reference/algorithms/broadcasting.rst @@ -0,0 +1,10 @@ +************ +Broadcasting +************ + +.. 
automodule:: networkx.algorithms.broadcasting +.. autosummary:: + :toctree: generated/ + + tree_broadcast_center + tree_broadcast_time diff --git a/doc/reference/algorithms/index.rst b/doc/reference/algorithms/index.rst index f8c5aff485d9..2dd8d8f054a6 100644 --- a/doc/reference/algorithms/index.rst +++ b/doc/reference/algorithms/index.rst @@ -15,6 +15,7 @@ Algorithms bipartite boundary bridges + broadcasting centrality chains chordal diff --git a/networkx/algorithms/__init__.py b/networkx/algorithms/__init__.py index eda2912cadd0..56bfb14afdfb 100644 --- a/networkx/algorithms/__init__.py +++ b/networkx/algorithms/__init__.py @@ -1,6 +1,7 @@ from networkx.algorithms.assortativity import * from networkx.algorithms.asteroidal import * from networkx.algorithms.boundary import * +from networkx.algorithms.broadcasting import * from networkx.algorithms.bridges import * from networkx.algorithms.chains import * from networkx.algorithms.centrality import * diff --git a/networkx/algorithms/broadcasting.py b/networkx/algorithms/broadcasting.py new file mode 100644 index 000000000000..094ac5e23b3d --- /dev/null +++ b/networkx/algorithms/broadcasting.py @@ -0,0 +1,153 @@ +"""Routines to calculate the broadcast time of certain graphs. + +Broadcasting is an information dissemination problem in which a node in a graph, +called the originator, must distribute a message to all other nodes by placing +a series of calls along the edges of the graph. Once informed, other nodes aid +the originator in distributing the message. + +The broadcasting must be completed as quickly as possible subject to the +following constraints: +- Each call requires one unit of time. +- A node can only participate in one call per unit of time. +- Each call only involves two adjacent nodes: a sender and a receiver. 
+""" + +import networkx as nx +from networkx import NetworkXError +from networkx.utils import not_implemented_for + +__all__ = [ + "tree_broadcast_center", + "tree_broadcast_time", +] + + +def _get_max_broadcast_value(G, U, v, values): + adj = sorted(set(G.neighbors(v)) & U, key=values.get, reverse=True) + return max(values[u] + i for i, u in enumerate(adj, start=1)) + + +def _get_broadcast_centers(G, v, values, target): + adj = sorted(G.neighbors(v), key=values.get, reverse=True) + j = next(i for i, u in enumerate(adj, start=1) if values[u] + i == target) + return set([v] + adj[:j]) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def tree_broadcast_center(G): + """Return the Broadcast Center of the tree `G`. + + The broadcast center of a graph G denotes the set of nodes having + minimum broadcast time [1]_. This is a linear algorithm for determining + the broadcast center of a tree with ``N`` nodes, as a by-product it also + determines the broadcast time from the broadcast center. + + Parameters + ---------- + G : undirected graph + The graph should be an undirected tree + + Returns + ------- + BC : (int, set) tuple + minimum broadcast number of the tree, set of broadcast centers + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. + + References + ---------- + .. [1] Slater, P.J., Cockayne, E.J., Hedetniemi, S.T, + Information dissemination in trees. SIAM J.Comput. 
10(4), 692–701 (1981) + """ + # Assert that the graph G is a tree + if not nx.is_tree(G): + NetworkXError("Input graph is not a tree") + # step 0 + if G.number_of_nodes() == 2: + return 1, set(G.nodes()) + if G.number_of_nodes() == 1: + return 0, set(G.nodes()) + + # step 1 + U = {node for node, deg in G.degree if deg == 1} + values = {n: 0 for n in U} + T = G.copy() + T.remove_nodes_from(U) + + # step 2 + W = {node for node, deg in T.degree if deg == 1} + values.update((w, G.degree[w] - 1) for w in W) + + # step 3 + while T.number_of_nodes() >= 2: + # step 4 + w = min(W, key=lambda n: values[n]) + v = next(T.neighbors(w)) + + # step 5 + U.add(w) + W.remove(w) + T.remove_node(w) + + # step 6 + if T.degree(v) == 1: + # update t(v) + values.update({v: _get_max_broadcast_value(G, U, v, values)}) + W.add(v) + + # step 7 + v = nx.utils.arbitrary_element(T) + b_T = _get_max_broadcast_value(G, U, v, values) + return b_T, _get_broadcast_centers(G, v, values, b_T) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def tree_broadcast_time(G, node=None): + """Return the Broadcast Time of the tree `G`. + + The minimum broadcast time of a node is defined as the minimum amount + of time required to complete broadcasting starting from the + originator. The broadcast time of a graph is the maximum over + all nodes of the minimum broadcast time from that node [1]_. + This function returns the minimum broadcast time of `node`. + If `node` is None the broadcast time for the graph is returned. + + Parameters + ---------- + G : undirected graph + The graph should be an undirected tree + node: int, optional + index of starting node. If `None`, the algorithm returns the broadcast + time of the tree. + + Returns + ------- + BT : int + Broadcast Time of a node in a tree + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. + + References + ---------- + .. [1] Harutyunyan, H. A. and Li, Z. 
+ "A Simple Construction of Broadcast Graphs." + In Computing and Combinatorics. COCOON 2019 + (Ed. D. Z. Du and C. Tian.) Springer, pp. 240-253, 2019. + """ + b_T, b_C = tree_broadcast_center(G) + if node is not None: + return b_T + min(nx.shortest_path_length(G, node, u) for u in b_C) + dist_from_center = dict.fromkeys(G, len(G)) + for u in b_C: + for v, dist in nx.shortest_path_length(G, u).items(): + if dist < dist_from_center[v]: + dist_from_center[v] = dist + return b_T + max(dist_from_center.values()) diff --git a/networkx/algorithms/tests/test_broadcasting.py b/networkx/algorithms/tests/test_broadcasting.py new file mode 100644 index 000000000000..8ce34cf2bcc6 --- /dev/null +++ b/networkx/algorithms/tests/test_broadcasting.py @@ -0,0 +1,81 @@ +"""Unit tests for the broadcasting module.""" +import math + +import networkx as nx + + +def test_example_tree_broadcast(): + """ + Test the BROADCAST algorithm on the example in the paper titled: "Information Dissemination in Trees" + """ + edge_list = [ + (0, 1), + (1, 2), + (2, 7), + (3, 4), + (5, 4), + (4, 7), + (6, 7), + (7, 9), + (8, 9), + (9, 13), + (13, 14), + (14, 15), + (14, 16), + (14, 17), + (13, 11), + (11, 10), + (11, 12), + (13, 18), + (18, 19), + (18, 20), + ] + G = nx.Graph(edge_list) + b_T, b_C = nx.tree_broadcast_center(G) + assert b_T == 6 + assert b_C == {13, 9} + # test broadcast time from specific vertex + assert nx.tree_broadcast_time(G, 17) == 8 + assert nx.tree_broadcast_time(G, 3) == 9 + # test broadcast time of entire tree + assert nx.tree_broadcast_time(G) == 10 + + +def test_path_broadcast(): + for i in range(2, 12): + G = nx.path_graph(i) + b_T, b_C = nx.tree_broadcast_center(G) + assert b_T == math.ceil(i / 2) + assert b_C == { + math.ceil(i / 2), + math.floor(i / 2), + math.ceil(i / 2 - 1), + math.floor(i / 2 - 1), + } + assert nx.tree_broadcast_time(G) == i - 1 + + +def test_empty_graph_broadcast(): + H = nx.empty_graph(1) + b_T, b_C = nx.tree_broadcast_center(H) + assert b_T == 0 + 
assert b_C == {0} + assert nx.tree_broadcast_time(H) == 0 + + +def test_star_broadcast(): + for i in range(4, 12): + G = nx.star_graph(i) + b_T, b_C = nx.tree_broadcast_center(G) + assert b_T == i + assert b_C == set(G.nodes()) + assert nx.tree_broadcast_time(G) == b_T + + +def test_binomial_tree_broadcast(): + for i in range(2, 8): + G = nx.binomial_tree(i) + b_T, b_C = nx.tree_broadcast_center(G) + assert b_T == i + assert b_C == {0, 2 ** (i - 1)} + assert nx.tree_broadcast_time(G) == 2 * i - 1 From 7a62ecbf7b4cac198f42189339ebaad58f4216e0 Mon Sep 17 00:00:00 2001 From: Vanshika Mishra <74042272+vanshika230@users.noreply.github.com> Date: Sun, 10 Mar 2024 08:22:16 +0530 Subject: [PATCH 28/47] Doc Improvements for Approximations Files (#7338) --- .../approximation/clustering_coefficient.py | 5 ++++ .../approximation/distance_measures.py | 9 ++++++ .../approximation/dominating_set.py | 22 +++++++++++++++ networkx/algorithms/approximation/maxcut.py | 28 +++++++++++++++++++ 4 files changed, 64 insertions(+) diff --git a/networkx/algorithms/approximation/clustering_coefficient.py b/networkx/algorithms/approximation/clustering_coefficient.py index f95c86d2a918..545fc65533b8 100644 --- a/networkx/algorithms/approximation/clustering_coefficient.py +++ b/networkx/algorithms/approximation/clustering_coefficient.py @@ -45,6 +45,11 @@ def average_clustering(G, trials=1000, seed=None): >>> approximation.average_clustering(G, trials=1000, seed=10) 0.214 + Raises + ------ + NetworkXNotImplemented + If G is directed. + References ---------- .. [1] Schank, Thomas, and Dorothea Wagner. 
Approximating clustering diff --git a/networkx/algorithms/approximation/distance_measures.py b/networkx/algorithms/approximation/distance_measures.py index a6fece661b16..d5847e65a2a4 100644 --- a/networkx/algorithms/approximation/distance_measures.py +++ b/networkx/algorithms/approximation/distance_measures.py @@ -40,6 +40,15 @@ def diameter(G, seed=None): d : integer Lower Bound on the Diameter of G + Examples + -------- + >>> G = nx.path_graph(10) # undirected graph + >>> nx.diameter(G) + 9 + >>> G = nx.cycle_graph(3, create_using=nx.DiGraph) # directed graph + >>> nx.diameter(G) + 2 + Raises ------ NetworkXError diff --git a/networkx/algorithms/approximation/dominating_set.py b/networkx/algorithms/approximation/dominating_set.py index 691564cf9dcc..06ab97d97612 100644 --- a/networkx/algorithms/approximation/dominating_set.py +++ b/networkx/algorithms/approximation/dominating_set.py @@ -43,6 +43,17 @@ def min_weighted_dominating_set(G, weight=None): each node in the graph and `w(V^*)` denotes the sum of the weights of each node in the minimum weight dominating set. + Examples + -------- + >>> G = nx.Graph([(0, 1), (0, 4), (1, 4), (1, 2), (2, 3), (3, 4), (2, 5)]) + >>> nx.approximation.min_weighted_dominating_set(G) + {1, 2, 4} + + Raises + ------ + NetworkXNotImplemented + If G is directed. + Notes ----- This algorithm computes an approximate minimum weighted dominating @@ -115,6 +126,17 @@ def min_edge_dominating_set(G): min_edge_dominating_set : set Returns a set of dominating edges whose size is no more than 2 * OPT. + Examples + -------- + >>> G = nx.petersen_graph() + >>> nx.approximation.min_edge_dominating_set(G) + {(0, 1), (4, 9), (6, 8), (5, 7), (2, 3)} + + Raises + ------ + ValueError + If the input graph `G` is empty. 
+ Notes ----- The algorithm computes an approximate solution to the edge dominating set diff --git a/networkx/algorithms/approximation/maxcut.py b/networkx/algorithms/approximation/maxcut.py index 0c30d224d2ff..f4e1da87c35a 100644 --- a/networkx/algorithms/approximation/maxcut.py +++ b/networkx/algorithms/approximation/maxcut.py @@ -39,6 +39,20 @@ def randomized_partitioning(G, seed=None, p=0.5, weight=None): partition : pair of node sets A partitioning of the nodes that defines a minimum cut. + + Examples + -------- + >>> G = nx.complete_graph(5) + >>> cut_size, partition = nx.approximation.randomized_partitioning(G, seed=1) + >>> cut_size + 6 + >>> partition + ({0, 3, 4}, {1, 2}) + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. """ cut = {node for node in G.nodes() if seed.random() < p} cut_size = nx.algorithms.cut_size(G, cut, weight=weight) @@ -86,6 +100,20 @@ def one_exchange(G, initial_cut=None, seed=None, weight=None): partition : pair of node sets A partitioning of the nodes that defines a maximum cut. + + Examples + -------- + >>> G = nx.complete_graph(5) + >>> curr_cut_size, partition = nx.approximation.one_exchange(G, seed=1) + >>> curr_cut_size + 6 + >>> partition + ({0, 2}, {1, 3, 4}) + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. """ if initial_cut is None: initial_cut = set() From c5cf756f65c3506e3bf63308c867b0fc155afa7d Mon Sep 17 00:00:00 2001 From: Ross Barnowski Date: Sun, 10 Mar 2024 07:18:12 -0700 Subject: [PATCH 29/47] Expire steinertree mehlhorn futurewarning (#7337) * Use mehlhorn default for steiner_tree and rm warning. * Reduce local variable count. 
--- networkx/algorithms/approximation/steinertree.py | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/networkx/algorithms/approximation/steinertree.py b/networkx/algorithms/approximation/steinertree.py index af5916442b92..47898cbf19cb 100644 --- a/networkx/algorithms/approximation/steinertree.py +++ b/networkx/algorithms/approximation/steinertree.py @@ -195,20 +195,12 @@ def steiner_tree(G, terminal_nodes, weight="weight", method=None): https://doi.org/10.1016/0020-0190(88)90066-X. """ if method is None: - import warnings - - msg = ( - "steiner_tree will change default method from 'kou' to 'mehlhorn' " - "in version 3.2.\nSet the `method` kwarg to remove this warning." - ) - warnings.warn(msg, FutureWarning, stacklevel=4) - method = "kou" + method = "mehlhorn" try: algo = ALGORITHMS[method] except KeyError as e: - msg = f"{method} is not a valid choice for an algorithm." - raise ValueError(msg) from e + raise ValueError(f"{method} is not a valid choice for an algorithm.") from e edges = algo(G, terminal_nodes, weight) # For multigraph we should add the minimal weight edge keys From b7816afaf560542cb0892bbabfab1c343b6f9936 Mon Sep 17 00:00:00 2001 From: Koen van den Berk Date: Sun, 10 Mar 2024 15:25:41 +0100 Subject: [PATCH 30/47] Fix custom weight attribute for Mehlhorn (#6681) * Fix custom weight attribute for Mehlhorn Using a custom weight attribute in the Mehlhorn implementation gave a KeyError. Reason: Edges in G_1_prime are given "weight" attributes, but upon updating, the weight (=/= "weight") of the existing edge in G_1_prime is get. Fix: set and get only "weight" attributes for G_1_prime. * Add test for non-default edge attr weight. * TST: parametrize on steiner method. * Add test for multigraph with non-default edge attr. 
--------- Co-authored-by: Ross Barnowski --- .../algorithms/approximation/steinertree.py | 2 +- .../approximation/tests/test_steinertree.py | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/networkx/algorithms/approximation/steinertree.py b/networkx/algorithms/approximation/steinertree.py index 47898cbf19cb..9b5c99488da6 100644 --- a/networkx/algorithms/approximation/steinertree.py +++ b/networkx/algorithms/approximation/steinertree.py @@ -64,7 +64,7 @@ def _mehlhorn_steiner_tree(G, terminal_nodes, weight): if not G_1_prime.has_edge(su, sv): G_1_prime.add_edge(su, sv, weight=weight_here) else: - new_weight = min(weight_here, G_1_prime[su][sv][weight]) + new_weight = min(weight_here, G_1_prime[su][sv]["weight"]) G_1_prime.add_edge(su, sv, weight=new_weight) G_2 = nx.minimum_spanning_edges(G_1_prime, data=True) diff --git a/networkx/algorithms/approximation/tests/test_steinertree.py b/networkx/algorithms/approximation/tests/test_steinertree.py index d7af1a1af410..95733f16ed19 100644 --- a/networkx/algorithms/approximation/tests/test_steinertree.py +++ b/networkx/algorithms/approximation/tests/test_steinertree.py @@ -189,3 +189,22 @@ def test_multigraph_steiner_tree(self): for method in self.methods: S = steiner_tree(G, terminal_nodes, method=method) assert edges_equal(S.edges(data=True, keys=True), expected_edges) + + +@pytest.mark.parametrize("method", ("kou", "mehlhorn")) +def test_steiner_tree_weight_attribute(method): + G = nx.star_graph(4) + # Add an edge attribute that is named something other than "weight" + nx.set_edge_attributes(G, {e: 10 for e in G.edges}, name="distance") + H = nx.approximation.steiner_tree(G, [1, 3], method=method, weight="distance") + assert nx.utils.edges_equal(H.edges, [(0, 1), (0, 3)]) + + +@pytest.mark.parametrize("method", ("kou", "mehlhorn")) +def test_steiner_tree_multigraph_weight_attribute(method): + G = nx.cycle_graph(3, create_using=nx.MultiGraph) + nx.set_edge_attributes(G, {e: 10 for e in 
G.edges}, name="distance") + G.add_edge(2, 0, distance=5) + H = nx.approximation.steiner_tree(G, list(G), method=method, weight="distance") + assert len(H.edges) == 2 and H.has_edge(2, 0, key=1) + assert sum(dist for *_, dist in H.edges(data="distance")) == 15 From 8f1e3a72a22c0e3c81e880671051a53825f4540d Mon Sep 17 00:00:00 2001 From: Ross Barnowski Date: Sun, 10 Mar 2024 07:26:16 -0700 Subject: [PATCH 31/47] Update `LCF_graph` docstring (#7262) * Update lcf_graph docstring. Use numpydoc docstring standard and fixup formatting/examples. * Reword incorporating review suggestions. --- networkx/generators/small.py | 49 +++++++++++++++++++++++------------- 1 file changed, 32 insertions(+), 17 deletions(-) diff --git a/networkx/generators/small.py b/networkx/generators/small.py index ea33e7419245..acd2fbc7a34e 100644 --- a/networkx/generators/small.py +++ b/networkx/generators/small.py @@ -64,36 +64,51 @@ def LCF_graph(n, shift_list, repeats, create_using=None): """ Return the cubic graph specified in LCF notation. - LCF notation (LCF=Lederberg-Coxeter-Fruchte) is a compressed + LCF (Lederberg-Coxeter-Fruchte) notation[1]_ is a compressed notation used in the generation of various cubic Hamiltonian - graphs of high symmetry. See, for example, dodecahedral_graph, - desargues_graph, heawood_graph and pappus_graph below. + graphs of high symmetry. See, for example, `dodecahedral_graph`, + `desargues_graph`, `heawood_graph` and `pappus_graph`. - n (number of nodes) - The starting graph is the n-cycle with nodes 0,...,n-1. - (The null graph is returned if n < 0.) + Nodes are drawn from ``range(n)``. Each node ``n_i`` is connected with + node ``n_i + shift % n`` where ``shift`` is given by cycling through + the input `shift_list` `repeat` s times. - shift_list = [s1,s2,..,sk], a list of integer shifts mod n, + Parameters + ---------- + n : int + The starting graph is the `n`-cycle with nodes ``0, ..., n-1``. + The null graph is returned if `n` < 1. 
- repeats - integer specifying the number of times that shifts in shift_list - are successively applied to each v_current in the n-cycle - to generate an edge between v_current and v_current+shift mod n. + shift_list : list + A list of integer shifts mod `n`, ``[s1, s2, .., sk]`` - For v1 cycling through the n-cycle a total of k*repeats - with shift cycling through shiftlist repeats times connect - v1 with v1+shift mod n + repeats : int + Integer specifying the number of times that shifts in `shift_list` + are successively applied to each current node in the n-cycle + to generate an edge between ``n_current`` and ``n_current + shift mod n``. + Returns + ------- + G : Graph + A graph instance created from the specified LCF notation. + + Examples + -------- The utility graph $K_{3,3}$ >>> G = nx.LCF_graph(6, [3, -3], 3) + >>> G.edges() + EdgeView([(0, 1), (0, 5), (0, 3), (1, 2), (1, 4), (2, 3), (2, 5), (3, 4), (4, 5)]) - The Heawood graph + The Heawood graph: >>> G = nx.LCF_graph(14, [5, -5], 7) + >>> nx.is_isomorphic(G, nx.heawood_graph()) + True - See http://mathworld.wolfram.com/LCFNotation.html for a description - and references. + References + ---------- + .. [1] https://en.wikipedia.org/wiki/LCF_notation """ if n <= 0: From bd2cefc6d004f965775c832948bb0ce039e1659b Mon Sep 17 00:00:00 2001 From: Ross Barnowski Date: Sun, 10 Mar 2024 07:31:19 -0700 Subject: [PATCH 32/47] Update louvain test modularity comparison to leq. 
(#7336) --- networkx/algorithms/community/tests/test_louvain.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/networkx/algorithms/community/tests/test_louvain.py b/networkx/algorithms/community/tests/test_louvain.py index b47fb74c57e2..816e6f143fe0 100644 --- a/networkx/algorithms/community/tests/test_louvain.py +++ b/networkx/algorithms/community/tests/test_louvain.py @@ -236,7 +236,7 @@ def test_threshold(): mod1 = nx.community.modularity(G, partition1) mod2 = nx.community.modularity(G, partition2) - assert mod1 < mod2 + assert mod1 <= mod2 def test_empty_graph(): From ef5f9acb5b711346d6eb92b4bf01d32a75d4f57c Mon Sep 17 00:00:00 2001 From: Aaron Z <40212329+aaronzo@users.noreply.github.com> Date: Sun, 10 Mar 2024 16:44:03 +0000 Subject: [PATCH 33/47] Option to include initial labels in `weisfeiler_lehman_subgraph_hashes` (#6601) * docstring * change n -> u for node in docstring * fix issue 7330 * Update networkx/algorithms/graph_hashing.py Co-authored-by: Ross Barnowski --------- Co-authored-by: Ross Barnowski --- networkx/algorithms/graph_hashing.py | 65 +++++++++++-------- .../algorithms/tests/test_graph_hashing.py | 29 +++++++++ 2 files changed, 68 insertions(+), 26 deletions(-) diff --git a/networkx/algorithms/graph_hashing.py b/networkx/algorithms/graph_hashing.py index bf78b7fb7079..b3761bf57988 100644 --- a/networkx/algorithms/graph_hashing.py +++ b/networkx/algorithms/graph_hashing.py @@ -56,19 +56,19 @@ def weisfeiler_lehman_graph_hash( Parameters ---------- - G: graph + G : graph The graph to be hashed. Can have node and/or edge attributes. Can also have no attributes. - edge_attr: string, default=None + edge_attr : string, optional (default=None) The key in edge attribute dictionary to be used for hashing. If None, edge labels are ignored. - node_attr: string, default=None + node_attr: string, optional (default=None) The key in node attribute dictionary to be used for hashing. 
If None, and no edge_attr given, use the degrees of the nodes as labels. - iterations: int, default=3 + iterations: int, optional (default=3) Number of neighbor aggregations to perform. Should be larger for larger graphs. - digest_size: int, default=16 + digest_size: int, optional (default=16) Size (in bits) of blake2b hash digest to use for hashing node labels. Returns @@ -162,7 +162,12 @@ def weisfeiler_lehman_step(G, labels, edge_attr=None): @nx._dispatchable(edge_attrs={"edge_attr": None}, node_attrs="node_attr") def weisfeiler_lehman_subgraph_hashes( - G, edge_attr=None, node_attr=None, iterations=3, digest_size=16 + G, + edge_attr=None, + node_attr=None, + iterations=3, + digest_size=16, + include_initial_labels=False, ): """ Return a dictionary of subgraph hashes by node. @@ -172,9 +177,10 @@ def weisfeiler_lehman_subgraph_hashes( Lists of subgraph hashes are sorted in increasing order of depth from their root node, with the hash at index i corresponding to a subgraph of nodes at most i edges distance from u. Thus, each list will contain - ``iterations + 1`` elements - a hash for a subgraph at each depth, and - additionally a hash of the initial node label (or equivalently a - subgraph of depth 0) + `iterations` elements - a hash for a subgraph at each depth. If + `include_initial_labels` is set to `True`, each list will additionally + have contain a hash of the initial node label (or equivalently a + subgraph of depth 0) prepended, totalling ``iterations + 1`` elements. The function iteratively aggregates and hashes neighborhoods of each node. This is achieved for each step by replacing for each node its label from @@ -182,15 +188,15 @@ def weisfeiler_lehman_subgraph_hashes( The new node label is then appended to a list of node labels for each node. - To aggregate neighborhoods at each step for a node $n$, all labels of - nodes adjacent to $n$ are concatenated. 
If the `edge_attr` parameter is set, + To aggregate neighborhoods for a node $u$ at each step, all labels of + nodes adjacent to $u$ are concatenated. If the `edge_attr` parameter is set, labels for each neighboring node are prefixed with the value of this attribute - along the connecting edge from this neighbor to node $n$. The resulting string + along the connecting edge from this neighbor to node $u$. The resulting string is then hashed to compress this information into a fixed digest size. Thus, at the $i$-th iteration, nodes within $i$ hops influence any given - hashed node label. We can therefore say that at depth $i$ for node $n$ - we have a hash for a subgraph induced by the $2i$-hop neighborhood of $n$. + hashed node label. We can therefore say that at depth $i$ for node $u$ + we have a hash for a subgraph induced by the $i$-hop neighborhood of $u$. The output can be used to to create general Weisfeiler-Lehman graph kernels, or generate features for graphs or nodes - for example to generate 'words' in @@ -207,21 +213,25 @@ def weisfeiler_lehman_subgraph_hashes( Parameters ---------- - G: graph + G : graph The graph to be hashed. Can have node and/or edge attributes. Can also have no attributes. - edge_attr: string, default=None + edge_attr : string, optional (default=None) The key in edge attribute dictionary to be used for hashing. If None, edge labels are ignored. - node_attr: string, default=None + node_attr : string, optional (default=None) The key in node attribute dictionary to be used for hashing. If None, and no edge_attr given, use the degrees of the nodes as labels. - iterations: int, default=3 + If None, and edge_attr is given, each node starts with an identical label. + iterations : int, optional (default=3) Number of neighbor aggregations to perform. Should be larger for larger graphs. - digest_size: int, default=16 + digest_size : int, optional (default=16) Size (in bits) of blake2b hash digest to use for hashing node labels. 
- The default size is 16 bits + The default size is 16 bits. + include_initial_labels : bool, optional (default=False) + If True, include the hashed initial node label as the first subgraph + hash for each node. Returns ------- @@ -249,12 +259,10 @@ def weisfeiler_lehman_subgraph_hashes( ['a93b64973cfc8897', 'db1b43ae35a1878f', '1716d2a4012fa4bc'] The first 2 WL subgraph hashes match. From this we can conclude that it's very - likely the neighborhood of 4 hops around these nodes are isomorphic: each - iteration aggregates 1-hop neighborhoods meaning hashes at depth $n$ are influenced - by every node within $2n$ hops. + likely the neighborhood of 2 hops around these nodes are isomorphic. - However the neighborhood of 6 hops is no longer isomorphic since their 3rd hash does - not match. + However the 3-hop neighborhoods of ``G1`` and ``G2`` are not isomorphic since the + 3rd hashes in the lists above are not equal. These nodes may be candidates to be classified together since their local topology is similar. 
@@ -299,8 +307,13 @@ def weisfeiler_lehman_step(G, labels, node_subgraph_hashes, edge_attr=None): return new_labels node_labels = _init_node_labels(G, edge_attr, node_attr) + if include_initial_labels: + node_subgraph_hashes = { + k: [_hash_label(v, digest_size)] for k, v in node_labels.items() + } + else: + node_subgraph_hashes = defaultdict(list) - node_subgraph_hashes = defaultdict(list) for _ in range(iterations): node_labels = weisfeiler_lehman_step( G, node_labels, node_subgraph_hashes, edge_attr diff --git a/networkx/algorithms/tests/test_graph_hashing.py b/networkx/algorithms/tests/test_graph_hashing.py index cffa8bb22c18..0828069d1c3c 100644 --- a/networkx/algorithms/tests/test_graph_hashing.py +++ b/networkx/algorithms/tests/test_graph_hashing.py @@ -655,3 +655,32 @@ def test_digest_size_subgraph_hash(): assert hexdigest_sizes_correct(digest_size16_hashes, 16) assert hexdigest_sizes_correct(digest_size32_hashes, 32) + + +def test_initial_node_labels_subgraph_hash(): + """ + Including the hashed initial label prepends an extra hash to the lists + """ + G = nx.path_graph(5) + nx.set_node_attributes(G, {i: int(0 < i < 4) for i in G}, "label") + # initial node labels: + # 0--1--1--1--0 + + without_initial_label = nx.weisfeiler_lehman_subgraph_hashes(G, node_attr="label") + assert all(len(v) == 3 for v in without_initial_label.values()) + # 3 different 1 hop nhds + assert len({v[0] for v in without_initial_label.values()}) == 3 + + with_initial_label = nx.weisfeiler_lehman_subgraph_hashes( + G, node_attr="label", include_initial_labels=True + ) + assert all(len(v) == 4 for v in with_initial_label.values()) + # 2 different initial labels + assert len({v[0] for v in with_initial_label.values()}) == 2 + + # check hashes match otherwise + for u in G: + for a, b in zip( + with_initial_label[u][1:], without_initial_label[u], strict=True + ): + assert a == b From d024ff4d432e5357f0360452a52912a111d8204e Mon Sep 17 00:00:00 2001 From: Aaron Z 
<40212329+aaronzo@users.noreply.github.com> Date: Mon, 11 Mar 2024 16:04:26 +0000 Subject: [PATCH 34/47] Add aaronzo as contributor (#7342) --- doc/developer/about_us.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/developer/about_us.rst b/doc/developer/about_us.rst index cbab8931706e..fbc801fa8e80 100644 --- a/doc/developer/about_us.rst +++ b/doc/developer/about_us.rst @@ -151,6 +151,7 @@ to add your name to the bottom of the list. - Davide D'Ascenzo, Github: `https://github.com/kidara` - Flavio Furia, Github: `https://github.com/flaviofuria` - Sebastiano Vigna, Github: `https://github.com/vigna` +- Aaron Zolnai-Lucas, GitHub: `aaronzo `_, LinkedIn: `aaronzolnailucas `_ A supplementary (but still incomplete) list of contributors is given by the list of names that have commits in ``networkx``'s From 2ece02c1b664bb6546fbdf152cbd0e301956c88a Mon Sep 17 00:00:00 2001 From: Erik Welch Date: Mon, 11 Mar 2024 16:36:14 -0500 Subject: [PATCH 35/47] Add eriknw as contributor (#7343) --- doc/developer/about_us.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/developer/about_us.rst b/doc/developer/about_us.rst index fbc801fa8e80..20ac851674e6 100644 --- a/doc/developer/about_us.rst +++ b/doc/developer/about_us.rst @@ -152,6 +152,7 @@ to add your name to the bottom of the list. - Flavio Furia, Github: `https://github.com/flaviofuria` - Sebastiano Vigna, Github: `https://github.com/vigna` - Aaron Zolnai-Lucas, GitHub: `aaronzo `_, LinkedIn: `aaronzolnailucas `_ +- Erik Welch, GitHub: `eriknw `_, LinkedIn: `eriknwelch `_ A supplementary (but still incomplete) list of contributors is given by the list of names that have commits in ``networkx``'s From e517554b89ab99608cfe9576bc40e46d8a0424b6 Mon Sep 17 00:00:00 2001 From: Erik Welch Date: Wed, 13 Mar 2024 11:39:15 -0500 Subject: [PATCH 36/47] Fix #7339. 
`shortest_path` inconsisitent with warning (#7341) --- networkx/algorithms/centrality/reaching.py | 2 +- networkx/algorithms/shortest_paths/generic.py | 6 +++--- networkx/algorithms/shortest_paths/tests/test_generic.py | 4 +++- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/networkx/algorithms/centrality/reaching.py b/networkx/algorithms/centrality/reaching.py index 63ecc216eae5..93cb75a96416 100644 --- a/networkx/algorithms/centrality/reaching.py +++ b/networkx/algorithms/centrality/reaching.py @@ -112,7 +112,7 @@ def as_distance(u, v, d): # TODO This can be trivially parallelized. lrc = [ centrality(G, node, paths=paths, weight=weight, normalized=normalized) - for node, paths in shortest_paths + for node, paths in shortest_paths.items() ] max_lrc = max(lrc) diff --git a/networkx/algorithms/shortest_paths/generic.py b/networkx/algorithms/shortest_paths/generic.py index 81b72419acec..c1ed69808c7e 100644 --- a/networkx/algorithms/shortest_paths/generic.py +++ b/networkx/algorithms/shortest_paths/generic.py @@ -149,11 +149,11 @@ def shortest_path(G, source=None, target=None, weight=None, method="dijkstra"): # Find paths between all pairs. if method == "unweighted": - paths = nx.all_pairs_shortest_path(G) + paths = dict(nx.all_pairs_shortest_path(G)) elif method == "dijkstra": - paths = nx.all_pairs_dijkstra_path(G, weight=weight) + paths = dict(nx.all_pairs_dijkstra_path(G, weight=weight)) else: # method == 'bellman-ford': - paths = nx.all_pairs_bellman_ford_path(G, weight=weight) + paths = dict(nx.all_pairs_bellman_ford_path(G, weight=weight)) else: # Find paths from all nodes co-accessible to the target. 
if G.is_directed(): diff --git a/networkx/algorithms/shortest_paths/tests/test_generic.py b/networkx/algorithms/shortest_paths/tests/test_generic.py index 9fcc8c396d57..e30de51771eb 100644 --- a/networkx/algorithms/shortest_paths/tests/test_generic.py +++ b/networkx/algorithms/shortest_paths/tests/test_generic.py @@ -212,7 +212,9 @@ def test_single_source_all_shortest_paths(self): assert sorted(ans[4]) == [[4]] def test_all_pairs_shortest_path(self): - p = dict(nx.shortest_path(self.cycle)) + # shortest_path w/o source and target will return a generator instead of + # a dict beginning in version 3.5. Only the first call needs changed here. + p = nx.shortest_path(self.cycle) assert p[0][3] == [0, 1, 2, 3] assert p == dict(nx.all_pairs_shortest_path(self.cycle)) p = dict(nx.shortest_path(self.grid)) From 929b5ad054fcdc324131593276eb02d474986da2 Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Wed, 13 Mar 2024 17:42:43 +0100 Subject: [PATCH 37/47] Add better error message when trying to get edge that is not present (#7245) * Add better error message when trying to get edge that is not present * reraise KeyError instead for NetworkXError * Update networkx/classes/tests/test_reportviews.py Co-authored-by: Dan Schult * Excape regex pattern * Minor updates, simplify exception and tests. 
--------- Co-authored-by: Dan Schult Co-authored-by: Ross Barnowski --- networkx/classes/reportviews.py | 5 ++++- networkx/classes/tests/test_reportviews.py | 6 +++++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/networkx/classes/reportviews.py b/networkx/classes/reportviews.py index 264823539a68..5f9397f82a77 100644 --- a/networkx/classes/reportviews.py +++ b/networkx/classes/reportviews.py @@ -1084,7 +1084,10 @@ def __getitem__(self, e): f"try list(G.edges)[{e.start}:{e.stop}:{e.step}]" ) u, v = e - return self._adjdict[u][v] + try: + return self._adjdict[u][v] + except KeyError as ex: # Customize msg to indicate exception origin + raise KeyError(f"The edge {e} is not in the graph.") # EdgeDataView methods def __call__(self, nbunch=None, data=False, *, default=None): diff --git a/networkx/classes/tests/test_reportviews.py b/networkx/classes/tests/test_reportviews.py index a68d6eb82298..262dbfab3169 100644 --- a/networkx/classes/tests/test_reportviews.py +++ b/networkx/classes/tests/test_reportviews.py @@ -596,9 +596,13 @@ def test_getitem(self): assert ev[0, 1] == {"foo": "bar"} # slicing - with pytest.raises(nx.NetworkXError): + with pytest.raises(nx.NetworkXError, match=".*does not support slicing"): G.edges[0:5] + # Invalid edge + with pytest.raises(KeyError, match=r".*edge.*is not in the graph."): + G.edges[0, 9] + def test_call(self): ev = self.eview(self.G) assert id(ev) == id(ev()) From f336cf28ae57e157341596197a5868368081d901 Mon Sep 17 00:00:00 2001 From: Erik Welch Date: Fri, 15 Mar 2024 14:00:43 -0500 Subject: [PATCH 38/47] Add `nx.config` dict for configuring dispatching and backends (#7225) * Add `nx.backend_config` dict for configuring dispatching and backends * Rename `nx.backend_config` to `nx.config` * "fallback_to_nx" is for testing, not for user config * Move config of backends to e.g. `nx.config["backends"]["cugraph"]` * How do you like this mypy?! 
* Rename `automatic_backends` to `backend_priority` (and env variables) * Create a class to handle configuration * Oops thanks mypy * Fix to work with more strict config * Support (and test) default values * Remove `__class_getitem__` and add docstring * Allow `strict=False` when defining subclasses. This allows configs to be added and deleted. * Move `__init_subclass__` --- networkx/__init__.py | 2 +- .../algorithms/operators/tests/test_binary.py | 2 +- networkx/classes/tests/test_backends.py | 4 +- networkx/conftest.py | 16 +- networkx/utils/backends.py | 43 +++- networkx/utils/configs.py | 228 ++++++++++++++++++ networkx/utils/tests/test_config.py | 180 ++++++++++++++ 7 files changed, 452 insertions(+), 23 deletions(-) create mode 100644 networkx/utils/configs.py create mode 100644 networkx/utils/tests/test_config.py diff --git a/networkx/__init__.py b/networkx/__init__.py index 72d2ef6562f9..39381a3ecce1 100644 --- a/networkx/__init__.py +++ b/networkx/__init__.py @@ -17,7 +17,7 @@ from networkx.exception import * from networkx import utils -from networkx.utils.backends import _dispatchable +from networkx.utils.backends import _dispatchable, config from networkx import classes from networkx.classes import filters diff --git a/networkx/algorithms/operators/tests/test_binary.py b/networkx/algorithms/operators/tests/test_binary.py index 9693e6332f56..c2e9a00455ef 100644 --- a/networkx/algorithms/operators/tests/test_binary.py +++ b/networkx/algorithms/operators/tests/test_binary.py @@ -53,7 +53,7 @@ def test_intersection(): assert set(I2.nodes()) == {1, 2, 3, 4} assert sorted(I2.edges()) == [(2, 3)] # Only test if not performing auto convert testing of backend implementations - if not nx.utils.backends._dispatchable._automatic_backends: + if not nx.config["backend_priority"]: with pytest.raises(TypeError): nx.intersection(G2, H) with pytest.raises(TypeError): diff --git a/networkx/classes/tests/test_backends.py b/networkx/classes/tests/test_backends.py index 
cc171cf5be13..855a3e69fbcd 100644 --- a/networkx/classes/tests/test_backends.py +++ b/networkx/classes/tests/test_backends.py @@ -31,8 +31,8 @@ def test_pickle(): @pytest.mark.skipif( - "not nx._dispatchable._automatic_backends " - "or nx._dispatchable._automatic_backends[0] != 'nx-loopback'" + "not nx.config['backend_priority'] " + "or nx.config['backend_priority'][0] != 'nx-loopback'" ) def test_graph_converter_needs_backend(): # When testing, `nx.from_scipy_sparse_array` will *always* call the backend diff --git a/networkx/conftest.py b/networkx/conftest.py index 2f5f6cfdaf75..9b9ef6d2f4d1 100644 --- a/networkx/conftest.py +++ b/networkx/conftest.py @@ -45,12 +45,6 @@ def pytest_configure(config): backend = config.getoption("--backend") if backend is None: backend = os.environ.get("NETWORKX_TEST_BACKEND") - if backend: - networkx.utils.backends._dispatchable._automatic_backends = [backend] - fallback_to_nx = config.getoption("--fallback-to-nx") - if not fallback_to_nx: - fallback_to_nx = os.environ.get("NETWORKX_FALLBACK_TO_NX") - networkx.utils.backends._dispatchable._fallback_to_nx = bool(fallback_to_nx) # nx-loopback backend is only available when testing backends = entry_points(name="nx-loopback", group="networkx.backends") if backends: @@ -64,16 +58,22 @@ def pytest_configure(config): " Try `pip install -e .`, or change your PYTHONPATH\n" " Make sure python finds the networkx repo you are testing\n\n" ) + if backend: + networkx.config["backend_priority"] = [backend] + fallback_to_nx = config.getoption("--fallback-to-nx") + if not fallback_to_nx: + fallback_to_nx = os.environ.get("NETWORKX_FALLBACK_TO_NX") + networkx.utils.backends._dispatchable._fallback_to_nx = bool(fallback_to_nx) def pytest_collection_modifyitems(config, items): # Setting this to True here allows tests to be set up before dispatching # any function call to a backend. 
networkx.utils.backends._dispatchable._is_testing = True - if automatic_backends := networkx.utils.backends._dispatchable._automatic_backends: + if backend_priority := networkx.config["backend_priority"]: # Allow pluggable backends to add markers to tests (such as skip or xfail) # when running in auto-conversion test mode - backend = networkx.utils.backends.backends[automatic_backends[0]].load() + backend = networkx.utils.backends.backends[backend_priority[0]].load() if hasattr(backend, "on_start_tests"): getattr(backend, "on_start_tests")(items) diff --git a/networkx/utils/backends.py b/networkx/utils/backends.py index 6c1ab0498b14..b98b94171480 100644 --- a/networkx/utils/backends.py +++ b/networkx/utils/backends.py @@ -106,7 +106,7 @@ class WrappedSparse: from ..exception import NetworkXNotImplemented from .decorators import argmap -__all__ = ["_dispatchable"] +__all__ = ["_dispatchable", "config"] def _do_nothing(): @@ -142,6 +142,31 @@ def _get_backends(group, *, load_and_call=False): backends = _get_backends("networkx.backends") backend_info = _get_backends("networkx.backend_info", load_and_call=True) +# We must import from config after defining `backends` above +from .configs import Config, config + +# Get default configuration from environment variables at import time +config.backend_priority = [ + x.strip() + for x in os.environ.get( + "NETWORKX_BACKEND_PRIORITY", + os.environ.get("NETWORKX_AUTOMATIC_BACKENDS", ""), + ).split(",") + if x.strip() +] +# Initialize default configuration for backends +config.backends = Config( + **{ + backend: ( + cfg if isinstance(cfg := info["default_config"], Config) else Config(**cfg) + ) + if "default_config" in info + else Config() + for backend, info in backend_info.items() + } +) +type(config.backends).__doc__ = "All installed NetworkX backends and their configs." 
+ # Load and cache backends on-demand _loaded_backends = {} # type: ignore[var-annotated] @@ -180,11 +205,6 @@ class _dispatchable: _fallback_to_nx = ( os.environ.get("NETWORKX_FALLBACK_TO_NX", "true").strip().lower() == "true" ) - _automatic_backends = [ - x.strip() - for x in os.environ.get("NETWORKX_AUTOMATIC_BACKENDS", "").split(",") - if x.strip() - ] def __new__( cls, @@ -532,11 +552,12 @@ def __call__(self, /, *args, backend=None, **kwargs): for g in graphs_resolved.values() } - if self._is_testing and self._automatic_backends and backend_name is None: + backend_priority = config.backend_priority + if self._is_testing and backend_priority and backend_name is None: # Special path if we are running networkx tests with a backend. # This even runs for (and handles) functions that mutate input graphs. return self._convert_and_call_for_tests( - self._automatic_backends[0], + backend_priority[0], args, kwargs, fallback_to_nx=self._fallback_to_nx, @@ -563,7 +584,7 @@ def __call__(self, /, *args, backend=None, **kwargs): raise ImportError(f"Unable to load backend: {graph_backend_name}") if ( "networkx" in graph_backend_names - and graph_backend_name not in self._automatic_backends + and graph_backend_name not in backend_priority ): # Not configured to convert networkx graphs to this backend raise TypeError( @@ -584,7 +605,7 @@ def __call__(self, /, *args, backend=None, **kwargs): ) # All graphs are backend graphs--no need to convert! return getattr(backend, self.name)(*args, **kwargs) - # Future work: try to convert and run with other backends in self._automatic_backends + # Future work: try to convert and run with other backends in backend_priority raise NetworkXNotImplemented( f"'{self.name}' not implemented by {graph_backend_name}" ) @@ -622,7 +643,7 @@ def __call__(self, /, *args, backend=None, **kwargs): ) ): # Should we warn or log if we don't convert b/c the input will be mutated? 
- for backend_name in self._automatic_backends: + for backend_name in backend_priority: if self._should_backend_run(backend_name, *args, **kwargs): return self._convert_and_call( backend_name, diff --git a/networkx/utils/configs.py b/networkx/utils/configs.py new file mode 100644 index 000000000000..8ccd81777b2a --- /dev/null +++ b/networkx/utils/configs.py @@ -0,0 +1,228 @@ +import collections +import typing +from dataclasses import dataclass + +__all__ = ["Config", "config"] + + +@dataclass(init=False, eq=False, slots=True, kw_only=True, match_args=False) +class Config: + """The base class for NetworkX configuration. + + There are two ways to use this to create configurations. The first is to + simply pass the initial configuration as keyword arguments to ``Config``: + + >>> cfg = Config(eggs=1, spam=5) + >>> cfg + Config(eggs=1, spam=5) + + The second--and preferred--way is to subclass ``Config`` with docs and annotations. + + >>> class MyConfig(Config): + ... '''Breakfast!''' + ... + ... eggs: int + ... spam: int + ... + ... def _check_config(self, key, value): + ... assert isinstance(value, int) and value >= 0 + >>> cfg = MyConfig(eggs=1, spam=5) + + Once defined, config items may be modified, but can't be added or deleted by default. + ``Config`` is a ``Mapping``, and can get and set configs via attributes or brackets: + + >>> cfg.eggs = 2 + >>> cfg.eggs + 2 + >>> cfg["spam"] = 42 + >>> cfg["spam"] + 42 + + Subclasses may also define ``_check_config`` (as done in the example above) + to ensure the value being assigned is valid: + + >>> cfg.spam = -1 + Traceback (most recent call last): + ... + AssertionError + + If a more flexible configuration object is needed that allows adding and deleting + configurations, then pass ``strict=False`` when defining the subclass: + + >>> class FlexibleConfig(Config, strict=False): + ... default_greeting: str = "Hello" + >>> flexcfg = FlexibleConfig() + >>> flexcfg.name = "Mr. 
Anderson" + >>> flexcfg + FlexibleConfig(default_greeting='Hello', name='Mr. Anderson') + """ + + def __init_subclass__(cls, strict=True): + cls._strict = strict + + def __new__(cls, **kwargs): + orig_class = cls + if cls is Config: + # Enable the "simple" case of accepting config definition as keywords + cls = type( + cls.__name__, + (cls,), + {"__annotations__": {key: typing.Any for key in kwargs}}, + ) + cls = dataclass( + eq=False, + repr=cls._strict, + slots=cls._strict, + kw_only=True, + match_args=False, + )(cls) + if not cls._strict: + cls.__repr__ = _flexible_repr + cls._orig_class = orig_class # Save original class so we can pickle + instance = object.__new__(cls) + instance.__init__(**kwargs) + return instance + + def _check_config(self, key, value): + """Check whether config value is valid. This is useful for subclasses.""" + + # Control behavior of attributes + def __dir__(self): + return self.__dataclass_fields__.keys() + + def __setattr__(self, key, value): + if self._strict and key not in self.__dataclass_fields__: + raise AttributeError(f"Invalid config name: {key!r}") + self._check_config(key, value) + object.__setattr__(self, key, value) + + def __delattr__(self, key): + if self._strict: + raise TypeError( + f"Configuration items can't be deleted (can't delete {key!r})." 
+ ) + object.__delattr__(self, key) + + # Be a `collection.abc.Collection` + def __contains__(self, key): + return ( + key in self.__dataclass_fields__ if self._strict else key in self.__dict__ + ) + + def __iter__(self): + return iter(self.__dataclass_fields__ if self._strict else self.__dict__) + + def __len__(self): + return len(self.__dataclass_fields__ if self._strict else self.__dict__) + + def __reversed__(self): + return reversed(self.__dataclass_fields__ if self._strict else self.__dict__) + + # Add dunder methods for `collections.abc.Mapping` + def __getitem__(self, key): + try: + return getattr(self, key) + except AttributeError as err: + raise KeyError(*err.args) from None + + def __setitem__(self, key, value): + try: + setattr(self, key, value) + except AttributeError as err: + raise KeyError(*err.args) from None + + __delitem__ = __delattr__ + _ipython_key_completions_ = __dir__ # config[" + + # Go ahead and make it a `collections.abc.Mapping` + def get(self, key, default=None): + return getattr(self, key, default) + + def items(self): + return collections.abc.ItemsView(self) + + def keys(self): + return collections.abc.KeysView(self) + + def values(self): + return collections.abc.ValuesView(self) + + # dataclass can define __eq__ for us, but do it here so it works after pickling + def __eq__(self, other): + if not isinstance(other, Config): + return NotImplemented + return self._orig_class == other._orig_class and self.items() == other.items() + + # Make pickle work + def __reduce__(self): + return self._deserialize, (self._orig_class, dict(self)) + + @staticmethod + def _deserialize(cls, kwargs): + return cls(**kwargs) + + +def _flexible_repr(self): + return ( + f"{self.__class__.__qualname__}(" + + ", ".join(f"{key}={val!r}" for key, val in self.__dict__.items()) + + ")" + ) + + +# Register, b/c `Mapping.__subclasshook__` returns `NotImplemented` +collections.abc.Mapping.register(Config) + + +class NetworkXConfig(Config): + """Configuration for 
NetworkX that controls behaviors such as how to use backends. + + Attribute and bracket notation are supported for getting and setting configurations: + + >>> nx.config.backend_priority == nx.config["backend_priority"] + True + + Config Parameters + ----------------- + backend_priority : list of backend names + Enable automatic conversion of graphs to backend graphs for algorithms + implemented by the backend. Priority is given to backends listed earlier. + + backends : Config mapping of backend names to backend Config + The keys of the Config mapping are names of all installed NetworkX backends, + and the values are their configurations as Config mappings. + """ + + backend_priority: list[str] + backends: Config + + def _check_config(self, key, value): + from .backends import backends + + if key == "backend_priority": + if not (isinstance(value, list) and all(isinstance(x, str) for x in value)): + raise TypeError( + f"{key!r} config must be a list of backend names; got {value!r}" + ) + if missing := {x for x in value if x not in backends}: + missing = ", ".join(map(repr, sorted(missing))) + raise ValueError(f"Unknown backend when setting {key!r}: {missing}") + elif key == "backends": + if not ( + isinstance(value, Config) + and all(isinstance(key, str) for key in value) + and all(isinstance(val, Config) for val in value.values()) + ): + raise TypeError( + f"{key!r} config must be a Config of backend configs; got {value!r}" + ) + if missing := {x for x in value if x not in backends}: + missing = ", ".join(map(repr, sorted(missing))) + raise ValueError(f"Unknown backend when setting {key!r}: {missing}") + + +# Backend configuration will be updated in backends.py +config = NetworkXConfig( + backend_priority=[], + backends=Config(), +) diff --git a/networkx/utils/tests/test_config.py b/networkx/utils/tests/test_config.py new file mode 100644 index 000000000000..5c9cc2f972c4 --- /dev/null +++ b/networkx/utils/tests/test_config.py @@ -0,0 +1,180 @@ +import collections 
+import pickle + +import pytest + +import networkx as nx +from networkx.utils.configs import Config + + +# Define this at module level so we can test pickling +class ExampleConfig(Config): + """Example configuration.""" + + x: int + y: str + + def _check_config(self, key, value): + if key == "x" and value <= 0: + raise ValueError("x must be positive") + if key == "y" and not isinstance(value, str): + raise TypeError("y must be a str") + + +class EmptyConfig(Config): + pass + + +@pytest.mark.parametrize("cfg", [EmptyConfig(), Config()]) +def test_config_empty(cfg): + assert dir(cfg) == [] + with pytest.raises(AttributeError): + cfg.x = 1 + with pytest.raises(KeyError): + cfg["x"] = 1 + with pytest.raises(AttributeError): + cfg.x + with pytest.raises(KeyError): + cfg["x"] + assert len(cfg) == 0 + assert "x" not in cfg + assert cfg == cfg + assert cfg.get("x", 2) == 2 + assert set(cfg.keys()) == set() + assert set(cfg.values()) == set() + assert set(cfg.items()) == set() + cfg2 = pickle.loads(pickle.dumps(cfg)) + assert cfg == cfg2 + assert isinstance(cfg, collections.abc.Collection) + assert isinstance(cfg, collections.abc.Mapping) + + +def test_config_subclass(): + with pytest.raises(TypeError, match="missing 2 required keyword-only"): + ExampleConfig() + with pytest.raises(ValueError, match="x must be positive"): + ExampleConfig(x=0, y="foo") + with pytest.raises(TypeError, match="unexpected keyword"): + ExampleConfig(x=1, y="foo", z="bad config") + with pytest.raises(TypeError, match="unexpected keyword"): + EmptyConfig(z="bad config") + cfg = ExampleConfig(x=1, y="foo") + assert cfg.x == 1 + assert cfg["x"] == 1 + assert cfg["y"] == "foo" + assert cfg.y == "foo" + assert "x" in cfg + assert "y" in cfg + assert "z" not in cfg + assert len(cfg) == 2 + assert set(iter(cfg)) == {"x", "y"} + assert set(cfg.keys()) == {"x", "y"} + assert set(cfg.values()) == {1, "foo"} + assert set(cfg.items()) == {("x", 1), ("y", "foo")} + assert dir(cfg) == ["x", "y"] + cfg.x = 2 + 
cfg["y"] = "bar" + assert cfg["x"] == 2 + assert cfg.y == "bar" + with pytest.raises(TypeError, match="can't be deleted"): + del cfg.x + with pytest.raises(TypeError, match="can't be deleted"): + del cfg["y"] + assert cfg.x == 2 + assert cfg == cfg + assert cfg == ExampleConfig(x=2, y="bar") + assert cfg != ExampleConfig(x=3, y="baz") + assert cfg != Config(x=2, y="bar") + with pytest.raises(TypeError, match="y must be a str"): + cfg["y"] = 5 + with pytest.raises(ValueError, match="x must be positive"): + cfg.x = -5 + assert cfg.get("x", 10) == 2 + with pytest.raises(AttributeError): + cfg.z = 5 + with pytest.raises(KeyError): + cfg["z"] = 5 + with pytest.raises(AttributeError): + cfg.z + with pytest.raises(KeyError): + cfg["z"] + cfg2 = pickle.loads(pickle.dumps(cfg)) + assert cfg == cfg2 + assert cfg.__doc__ == "Example configuration." + assert cfg2.__doc__ == "Example configuration." + + +def test_config_defaults(): + class DefaultConfig(Config): + x: int = 0 + y: int + + cfg = DefaultConfig(y=1) + assert cfg.x == 0 + cfg = DefaultConfig(x=2, y=1) + assert cfg.x == 2 + + +def test_nxconfig(): + assert isinstance(nx.config.backend_priority, list) + assert isinstance(nx.config.backends, Config) + with pytest.raises(TypeError, match="must be a list of backend names"): + nx.config.backend_priority = "nx_loopback" + with pytest.raises(ValueError, match="Unknown backend when setting"): + nx.config.backend_priority = ["this_almost_certainly_is_not_a_backend"] + with pytest.raises(TypeError, match="must be a Config of backend configs"): + nx.config.backends = {} + with pytest.raises(TypeError, match="must be a Config of backend configs"): + nx.config.backends = Config(plausible_backend_name={}) + with pytest.raises(ValueError, match="Unknown backend when setting"): + nx.config.backends = Config(this_almost_certainly_is_not_a_backend=Config()) + + +def test_not_strict(): + class FlexibleConfig(Config, strict=False): + x: int + + cfg = FlexibleConfig(x=1) + assert 
"_strict" not in cfg + assert len(cfg) == 1 + assert list(cfg) == ["x"] + assert list(cfg.keys()) == ["x"] + assert list(cfg.values()) == [1] + assert list(cfg.items()) == [("x", 1)] + assert cfg.x == 1 + assert cfg["x"] == 1 + assert "x" in cfg + assert hasattr(cfg, "x") + assert "FlexibleConfig(x=1)" in repr(cfg) + assert cfg == FlexibleConfig(x=1) + del cfg.x + assert "FlexibleConfig()" in repr(cfg) + assert len(cfg) == 0 + assert not hasattr(cfg, "x") + assert "x" not in cfg + assert not hasattr(cfg, "y") + assert "y" not in cfg + cfg.y = 2 + assert len(cfg) == 1 + assert list(cfg) == ["y"] + assert list(cfg.keys()) == ["y"] + assert list(cfg.values()) == [2] + assert list(cfg.items()) == [("y", 2)] + assert cfg.y == 2 + assert cfg["y"] == 2 + assert hasattr(cfg, "y") + assert "y" in cfg + del cfg["y"] + assert len(cfg) == 0 + assert list(cfg) == [] + with pytest.raises(TypeError, match="missing 1 required keyword-only"): + FlexibleConfig() + # Be strict when first creating the config object + with pytest.raises(TypeError, match="unexpected keyword argument 'y'"): + FlexibleConfig(x=1, y=2) + + class FlexibleConfigWithDefault(Config, strict=False): + x: int = 0 + + assert FlexibleConfigWithDefault().x == 0 + assert FlexibleConfigWithDefault(x=1)["x"] == 1 From 984f737bf4e79ae373ffa3b6c7329082806041db Mon Sep 17 00:00:00 2001 From: Vanshika Mishra <74042272+vanshika230@users.noreply.github.com> Date: Sat, 16 Mar 2024 00:36:25 +0530 Subject: [PATCH 39/47] Improve test coverage for Steiner Tree & Docs (#7348) * Improve test coverage for Steiner Tree & Docs * Update networkx/algorithms/approximation/steinertree.py Co-authored-by: Ross Barnowski * Update networkx/algorithms/approximation/steinertree.py Co-authored-by: Ross Barnowski * Update networkx/algorithms/approximation/tests/test_steinertree.py Co-authored-by: Ross Barnowski * Update test_steinertree.py * Update test_steinertree.py * Update dispatch decorator. 
--------- Co-authored-by: Ross Barnowski --- networkx/algorithms/approximation/steinertree.py | 12 ++++++++++-- .../approximation/tests/test_steinertree.py | 16 ++++++++++++++++ 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/networkx/algorithms/approximation/steinertree.py b/networkx/algorithms/approximation/steinertree.py index 9b5c99488da6..c6c834f422c7 100644 --- a/networkx/algorithms/approximation/steinertree.py +++ b/networkx/algorithms/approximation/steinertree.py @@ -126,7 +126,7 @@ def _remove_nonterminal_leaves(G, terminals): @not_implemented_for("directed") -@nx._dispatchable(edge_attrs="weight", returns_graph=True) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def steiner_tree(G, terminal_nodes, weight="weight", method=None): r"""Return an approximation to the minimum Steiner tree of a graph. @@ -164,7 +164,7 @@ def steiner_tree(G, terminal_nodes, weight="weight", method=None): Use the edge attribute specified by this string as the edge weight. Any edge attribute not present defaults to 1. - method : string, optional (default = 'kou') + method : string, optional (default = 'mehlhorn') The algorithm to use to approximate the Steiner tree. Supported options: 'kou', 'mehlhorn'. Other inputs produce a ValueError. @@ -175,6 +175,14 @@ def steiner_tree(G, terminal_nodes, weight="weight", method=None): Approximation to the minimum steiner tree of `G` induced by `terminal_nodes` . + Raises + ------ + NetworkXNotImplemented + If `G` is directed. + + ValueError + If the specified `method` is not supported. 
+ Notes ----- For multigraphs, the edge between two nodes with minimum weight is the diff --git a/networkx/algorithms/approximation/tests/test_steinertree.py b/networkx/algorithms/approximation/tests/test_steinertree.py index 95733f16ed19..23c3193e42ef 100644 --- a/networkx/algorithms/approximation/tests/test_steinertree.py +++ b/networkx/algorithms/approximation/tests/test_steinertree.py @@ -208,3 +208,19 @@ def test_steiner_tree_multigraph_weight_attribute(method): H = nx.approximation.steiner_tree(G, list(G), method=method, weight="distance") assert len(H.edges) == 2 and H.has_edge(2, 0, key=1) assert sum(dist for *_, dist in H.edges(data="distance")) == 15 + + +@pytest.mark.parametrize("method", (None, "mehlhorn", "kou")) +def test_steiner_tree_methods(method): + G = nx.star_graph(4) + expected = nx.Graph([(0, 1), (0, 3)]) + st = nx.approximation.steiner_tree(G, [1, 3], method=method) + assert nx.utils.edges_equal(st.edges, expected.edges) + + +def test_steiner_tree_method_invalid(): + G = nx.star_graph(4) + with pytest.raises( + ValueError, match="invalid_method is not a valid choice for an algorithm." 
+ ): + nx.approximation.steiner_tree(G, terminal_nodes=[1, 3], method="invalid_method") From dd7f54966da811494cc6dd6946dd6d2332b30658 Mon Sep 17 00:00:00 2001 From: Aditi Juneja <91629733+Schefflera-Arboricola@users.noreply.github.com> Date: Mon, 18 Mar 2024 02:53:51 +0530 Subject: [PATCH 40/47] added `seed` to `test_richclub_normalized` (#7355) added seed to test_richclub_normalized --- networkx/algorithms/tests/test_richclub.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/networkx/algorithms/tests/test_richclub.py b/networkx/algorithms/tests/test_richclub.py index 8d83abaea180..1bdb66847fdf 100644 --- a/networkx/algorithms/tests/test_richclub.py +++ b/networkx/algorithms/tests/test_richclub.py @@ -21,7 +21,7 @@ def test_richclub_seed(): def test_richclub_normalized(): G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)]) - rcNorm = nx.richclub.rich_club_coefficient(G, Q=2) + rcNorm = nx.richclub.rich_club_coefficient(G, Q=2, seed=42) assert rcNorm == {0: 1.0, 1: 1.0} From d218d09a9ddcb9355c0221d24028e26e40e825ab Mon Sep 17 00:00:00 2001 From: Vanshika Mishra <74042272+vanshika230@users.noreply.github.com> Date: Wed, 20 Mar 2024 01:10:52 +0530 Subject: [PATCH 41/47] Add tests to link_prediction.py (#7357) --- networkx/algorithms/tests/test_link_prediction.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/networkx/algorithms/tests/test_link_prediction.py b/networkx/algorithms/tests/test_link_prediction.py index b4643b3420d4..0878496bc2aa 100644 --- a/networkx/algorithms/tests/test_link_prediction.py +++ b/networkx/algorithms/tests/test_link_prediction.py @@ -172,7 +172,12 @@ def test_notimplemented(self, graph_type): nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)] ) - def test_node_not_found(self): + def test_node_u_not_found(self): + G = nx.Graph() + G.add_edges_from([(1, 3), (2, 3)]) + assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 1)]) + + def 
test_node_v_not_found(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (2, 3)]) assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) @@ -186,6 +191,12 @@ def test_equal_nodes(self): G = nx.complete_graph(4) assert pytest.raises(nx.NetworkXAlgorithmError, self.test, G, [(0, 0)], []) + def test_equal_nodes_with_alpha_one_raises_error(self): + G = nx.complete_graph(4) + assert pytest.raises( + nx.NetworkXAlgorithmError, self.test, G, [(0, 0)], [], alpha=1.0 + ) + def test_all_nonexistent_edges(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (2, 3)]) From 53c0513019e4ad31d8ad82df087a6d889f6cc782 Mon Sep 17 00:00:00 2001 From: Erik Welch Date: Tue, 19 Mar 2024 20:19:22 -0500 Subject: [PATCH 42/47] Fix pydot tests when testing backends (#7356) --- networkx/utils/backends.py | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/networkx/utils/backends.py b/networkx/utils/backends.py index b98b94171480..776e13c9517f 100644 --- a/networkx/utils/backends.py +++ b/networkx/utils/backends.py @@ -924,7 +924,7 @@ def _convert_and_call_for_tests( from collections.abc import Iterable, Iterator, Mapping from copy import copy - from io import BufferedReader, BytesIO + from io import BufferedReader, BytesIO, StringIO, TextIOWrapper from itertools import tee from random import Random @@ -941,9 +941,12 @@ def _convert_and_call_for_tests( args1, args2 = zip( *( (arg, copy(arg)) - if isinstance(arg, BytesIO | Random | Generator | RandomState) + if isinstance( + arg, BytesIO | StringIO | Random | Generator | RandomState + ) else tee(arg) - if isinstance(arg, Iterator) and not isinstance(arg, BufferedReader) + if isinstance(arg, Iterator) + and not isinstance(arg, BufferedReader | TextIOWrapper) else (arg, arg) for arg in args ) @@ -954,9 +957,12 @@ def _convert_and_call_for_tests( kwargs1, kwargs2 = zip( *( ((k, v), (k, copy(v))) - if isinstance(v, BytesIO | Random | Generator | RandomState) + if isinstance( + v, BytesIO | StringIO 
| Random | Generator | RandomState + ) else ((k, (teed := tee(v))[0]), (k, teed[1])) - if isinstance(v, Iterator) and not isinstance(v, BufferedReader) + if isinstance(v, Iterator) + and not isinstance(v, BufferedReader | TextIOWrapper) else ((k, v), (k, v)) for k, v in kwargs.items() ) @@ -1121,13 +1127,15 @@ def check_iterator(it): "read_gml", "read_graph6", "read_sparse6", - # We don't handle io.BufferedReader arguments + # We don't handle io.BufferedReader or io.TextIOWrapper arguments "bipartite_read_edgelist", "read_adjlist", "read_edgelist", "read_graphml", "read_multiline_adjlist", "read_pajek", + "from_pydot", + "pydot_read_dot", # graph comparison fails b/c of nan values "read_gexf", }: From e71bd8afe68bfd3ef0bc31888f2d29177188fb79 Mon Sep 17 00:00:00 2001 From: Erik Welch Date: Tue, 19 Mar 2024 20:20:17 -0500 Subject: [PATCH 43/47] Make `is_negatively_weighted` dispatchable (#7352) --- networkx/classes/function.py | 1 + 1 file changed, 1 insertion(+) diff --git a/networkx/classes/function.py b/networkx/classes/function.py index 726b3e23b26c..20aefa06680e 100644 --- a/networkx/classes/function.py +++ b/networkx/classes/function.py @@ -1038,6 +1038,7 @@ def is_weighted(G, edge=None, weight="weight"): return all(weight in data for u, v, data in G.edges(data=True)) +@nx._dispatchable(edge_attrs="weight") def is_negatively_weighted(G, edge=None, weight="weight"): """Returns True if `G` has negatively weighted edges. From bc94afe5d4a20777be1be78aa654c1f4233471e9 Mon Sep 17 00:00:00 2001 From: Ross Barnowski Date: Tue, 19 Mar 2024 18:22:52 -0700 Subject: [PATCH 44/47] Future proof xml parsing in graphml. (#7360) As of Python 3.12, the lxml tree parser raises a warning about evaluating truthiness of elements directly, instead recommending an is not None or len() test. 
--- networkx/readwrite/graphml.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/networkx/readwrite/graphml.py b/networkx/readwrite/graphml.py index 7d4fd7a66733..0b05e03a3b44 100644 --- a/networkx/readwrite/graphml.py +++ b/networkx/readwrite/graphml.py @@ -982,7 +982,7 @@ def decode_data_elements(self, graphml_keys, obj_xml): node_label = None # set GenericNode's configuration as shape type gn = data_element.find(f"{{{self.NS_Y}}}GenericNode") - if gn: + if gn is not None: data["shape_type"] = gn.get("configuration") for node_type in ["GenericNode", "ShapeNode", "SVGNode", "ImageNode"]: pref = f"{{{self.NS_Y}}}{node_type}/{{{self.NS_Y}}}" From 1b34ffeaefdeeaf0d013d1dcce0675adb50d30f7 Mon Sep 17 00:00:00 2001 From: Dan Schult Date: Wed, 20 Mar 2024 16:39:53 -0400 Subject: [PATCH 45/47] make doc_string examples order-independent by removing np.set_printoptions (#7361) remove np.set_printoptions from normalized_laplacian_matrix doc_string example --- networkx/linalg/laplacianmatrix.py | 25 ++++++++++++------------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/networkx/linalg/laplacianmatrix.py b/networkx/linalg/laplacianmatrix.py index 1df4b422b76b..f68c6614d2f5 100644 --- a/networkx/linalg/laplacianmatrix.py +++ b/networkx/linalg/laplacianmatrix.py @@ -179,7 +179,6 @@ def normalized_laplacian_matrix(G, nodelist=None, weight="weight"): -------- >>> import numpy as np - >>> np.set_printoptions(precision=4) # To print with lower precision >>> edges = [ ... (1, 2), ... (2, 1), @@ -189,10 +188,10 @@ def normalized_laplacian_matrix(G, nodelist=None, weight="weight"): ... ] >>> DiG = nx.DiGraph(edges) >>> print(nx.normalized_laplacian_matrix(DiG).toarray()) - [[ 1. -0.7071 0. 0. ] - [-0.7071 1. -0.7071 0. ] - [ 0. 0. 1. -1. ] - [ 0. 0. -1. 1. ]] + [[ 1. -0.70710678 0. 0. ] + [-0.70710678 1. -0.70710678 0. ] + [ 0. 0. 1. -1. ] + [ 0. 0. -1. 1. ]] Notice that node 4 is represented by the third column and row. 
This is because by default the row/column order is the order of `G.nodes` (i.e. the node added @@ -200,16 +199,16 @@ def normalized_laplacian_matrix(G, nodelist=None, weight="weight"): To control the node order of the matrix, use the `nodelist` argument. >>> print(nx.normalized_laplacian_matrix(DiG, nodelist=[1, 2, 3, 4]).toarray()) - [[ 1. -0.7071 0. 0. ] - [-0.7071 1. 0. -0.7071] - [ 0. 0. 1. -1. ] - [ 0. 0. -1. 1. ]] + [[ 1. -0.70710678 0. 0. ] + [-0.70710678 1. 0. -0.70710678] + [ 0. 0. 1. -1. ] + [ 0. 0. -1. 1. ]] >>> G = nx.Graph(edges) >>> print(nx.normalized_laplacian_matrix(G).toarray()) - [[ 1. -0.7071 0. 0. ] - [-0.7071 1. -0.5 0. ] - [ 0. -0.5 1. -0.7071] - [ 0. 0. -0.7071 1. ]] + [[ 1. -0.70710678 0. 0. ] + [-0.70710678 1. -0.5 0. ] + [ 0. -0.5 1. -0.70710678] + [ 0. 0. -0.70710678 1. ]] See Also -------- From 9ad788765bbc9650939e4ea4aa5ab6e3d1b2795e Mon Sep 17 00:00:00 2001 From: Aditi Juneja <91629733+Schefflera-Arboricola@users.noreply.github.com> Date: Thu, 21 Mar 2024 10:15:49 +0530 Subject: [PATCH 46/47] [DOC, DISPATCH] : updated and added `backend.py`'s docs (#7305) Co-authored-by: Dan Schult Co-authored-by: Ross Barnowski --- doc/reference/utils.rst | 7 +- networkx/utils/backends.py | 332 ++++++++++++++++++++++++------------- 2 files changed, 222 insertions(+), 117 deletions(-) diff --git a/doc/reference/utils.rst b/doc/reference/utils.rst index acd9c92584ee..ee67298ec266 100644 --- a/doc/reference/utils.rst +++ b/doc/reference/utils.rst @@ -76,10 +76,9 @@ Mapped Queue Backends -------- -.. note:: This is an experimental feature to dispatch your computations to an alternate - backend like `GraphBLAS `_ - instead of using pure Python dictionaries for computation. - Things will change and break in the future! +.. note:: NetworkX backends are experimental. They let you execute an alternate + backend implementation instead of NetworkX's pure Python dictionaries + implementation. Things will change and break in the future! .. 
automodule:: networkx.utils.backends .. autosummary:: diff --git a/networkx/utils/backends.py b/networkx/utils/backends.py index 776e13c9517f..d417d9793313 100644 --- a/networkx/utils/backends.py +++ b/networkx/utils/backends.py @@ -1,100 +1,161 @@ """ -Code to support various backends in a plugin dispatch architecture. - -Create a Dispatcher -------------------- - -To be a valid backend, a package must register an entry_point -of `networkx.backends` with a key pointing to the handler. - -For example:: - - entry_points={'networkx.backends': 'sparse = networkx_backend_sparse'} - -The backend must create a Graph-like object which contains an attribute -``__networkx_backend__`` with a value of the entry point name. - -Continuing the example above:: - - class WrappedSparse: - __networkx_backend__ = "sparse" - ... - -When a dispatchable NetworkX algorithm encounters a Graph-like object -with a ``__networkx_backend__`` attribute, it will look for the associated -dispatch object in the entry_points, load it, and dispatch the work to it. - - -Testing -------- -To assist in validating the backend algorithm implementations, if an -environment variable ``NETWORKX_TEST_BACKEND`` is set to a registered -backend key, the dispatch machinery will automatically convert regular -networkx Graphs and DiGraphs to the backend equivalent by calling -``.convert_from_nx(G, edge_attrs=edge_attrs, name=name)``. -Set ``NETWORKX_FALLBACK_TO_NX`` environment variable to have tests -use networkx graphs for algorithms not implemented by the backend. - -The arguments to ``convert_from_nx`` are: - -- ``G`` : networkx Graph -- ``edge_attrs`` : dict, optional - Dict that maps edge attributes to default values if missing in ``G``. - If None, then no edge attributes will be converted and default may be 1. -- ``node_attrs``: dict, optional - Dict that maps node attribute to default values if missing in ``G``. - If None, then no node attributes will be converted. 
-- ``preserve_edge_attrs`` : bool - Whether to preserve all edge attributes. -- ``preserve_node_attrs`` : bool - Whether to preserve all node attributes. -- ``preserve_graph_attrs`` : bool - Whether to preserve all graph attributes. -- ``preserve_all_attrs`` : bool - Whether to preserve all graph, node, and edge attributes. -- ``name`` : str - The name of the algorithm. -- ``graph_name`` : str - The name of the graph argument being converted. - -The converted object is then passed to the backend implementation of -the algorithm. The result is then passed to -``.convert_to_nx(result, name=name)`` to convert back -to a form expected by the NetworkX tests. - -By defining ``convert_from_nx`` and ``convert_to_nx`` methods and setting -the environment variable, NetworkX will automatically route tests on -dispatchable algorithms to the backend, allowing the full networkx test -suite to be run against the backend implementation. - -Example pytest invocation:: - - NETWORKX_TEST_BACKEND=sparse pytest --pyargs networkx - -Dispatchable algorithms which are not implemented by the backend -will cause a ``pytest.xfail()``, giving some indication that not all -tests are working, while avoiding causing an explicit failure. - -If a backend only partially implements some algorithms, it can define -a ``can_run(name, args, kwargs)`` function that returns True or False -indicating whether it can run the algorithm with the given arguments. -It may also return a string indicating why the algorithm can't be run; -this string may be used in the future to give helpful info to the user. - -A backend may also define ``should_run(name, args, kwargs)`` that is similar -to ``can_run``, but answers whether the backend *should* be run (converting -if necessary). Like ``can_run``, it receives the original arguments so it -can decide whether it should be run by inspecting the arguments. ``can_run`` -runs before ``should_run``, so ``should_run`` may assume ``can_run`` is True. 
- -If not implemented by the backend, ``can_run`` and ``should_run`` are -assumed to always return True if the backend implements the algorithm. - -A special ``on_start_tests(items)`` function may be defined by the backend. -It will be called with the list of NetworkX tests discovered. Each item -is a test object that can be marked as xfail if the backend does not support -the test using `item.add_marker(pytest.mark.xfail(reason=...))`. +NetworkX utilizes a plugin-dispatch architecture, which means we can plug in and +out of backends with minimal code changes. A valid NetworkX backend specifies +`entry points `_, +named ``networkx.backends`` and an optional ``networkx.backend_info`` when it is +installed (not imported). This allows NetworkX to dispatch (redirect) function calls +to the backend so the execution flows to the designated backend +implementation, similar to how plugging a charger into a socket redirects the +electricity to your phone. This design enhances flexibility and integration, making +NetworkX more adaptable and efficient. + +For example, you can convert the NetworkX Graph object ``G`` into a Graph-like +object specific to the backend and then pass that in the NetworkX function:: + + H = nxp.ParallelGraph(G) + nx.betweenness_centrality(H, k=10) + +or you can specify the backend as a kwarg:: + + nx.betweenness_centrality(G, k=10, backend="parallel") + +Also, you might have seen the ``@nx._dispatchable`` decorator on many of the NetworkX +functions in the codebase. It is used to redirect the execution of a function to its +backend implementation and manage all the ``backend_kwargs``. When a dispatchable +NetworkX algorithm encounters a nx.Graph-like object with a ``__networkx_backend__`` +attribute, it will look for the associated dispatch object in the entry_points, load +it, and dispatch the computation work to it. 
Currently, the following are the trusted +backends of NetworkX: + +- `graphblas `_ +- `cugraph `_ +- `parallel `_ +- ``loopback`` is for testing purposes only and is not a real backend. + +Note that the ``backend_name`` is ``parallel`` and the package name is ``nx-parallel``, and +we use ``nx_parallel`` while installing and importing the package. + +Creating a Custom backend +------------------------- + +1. To be a valid backend that is discoverable by NetworkX, your package must + register an `entry-point `_ + ``networkx.backends`` in the package's metadata, with a `key pointing to your + dispatch object `_ . + For example, if you are using ``setuptools`` to manage your backend package, + you can `add the following to your pyproject.toml file `_:: + + [project.entry-points."networkx.backends"] + backend_name = "your_dispatcher_class" + + You can also add the ``backend_info`` entry-point. It points towards the ``get_info`` + function that returns all the backend information, which is then used to build the + "Additional Backend Implementation" box at the end of algorithm's documentation + page (e.g. `nx-cugraph's get_info function `_):: + + [project.entry-points."networkx.backend_info"] + backend_name = "your_get_info_function" + + Note that this would only work if your backend is a trusted backend of NetworkX, + and is present in the `.circleci/config.yml` and + `.github/workflows/deploy-docs.yml` files in the NetworkX repository. + +2. The backend must create an ``nx.Graph``-like object which contains an attribute + ``__networkx_backend__`` with a value of the entry point name:: + + class BackendGraph: + __networkx_backend__ = "backend_name" + ... + + +Testing the Custom backend +-------------------------- + +To test your custom backend, you can run the NetworkX test suite with your backend. +This also ensures that the custom backend is compatible with NetworkX's API. 
+ +Environment Variable Setup +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To enable automatic testing with your custom backend, follow these steps: + +1. Set Backend Environment Variables: + - ``NETWORKX_TEST_BACKEND`` : Setting this to your registered backend key will let + the NetworkX's dispatch machinery automatically convert a regular NetworkX + ``Graph``, ``DiGraph``, ``MultiGraph``, etc. to their backend equivalents, using + ``your_dispatcher_class.convert_from_nx(G, ...)`` function. + - ``NETWORKX_FALLBACK_TO_NX`` (default=False) : Setting this variable to `True` will + instruct tests to use a NetworkX ``Graph`` for algorithms not implemented by your + custom backend. Setting this to `False` will only run the tests for algorithms + implemented by your custom backend and tests for other algorithms will ``xfail``. + +2. Defining ``convert_from_nx`` and ``convert_to_nx`` methods: + The arguments to ``convert_from_nx`` are: + + - ``G`` : NetworkX Graph + - ``edge_attrs`` : dict, optional + Dictionary mapping edge attributes to default values if missing in ``G``. + If None, then no edge attributes will be converted and default may be 1. + - ``node_attrs``: dict, optional + Dictionary mapping node attributes to default values if missing in ``G``. + If None, then no node attributes will be converted. + - ``preserve_edge_attrs`` : bool + Whether to preserve all edge attributes. + - ``preserve_node_attrs`` : bool + Whether to preserve all node attributes. + - ``preserve_graph_attrs`` : bool + Whether to preserve all graph attributes. + - ``preserve_all_attrs`` : bool + Whether to preserve all graph, node, and edge attributes. + - ``name`` : str + The name of the algorithm. + - ``graph_name`` : str + The name of the graph argument being converted. 
+ +Running Tests +~~~~~~~~~~~~~ + +You can invoke NetworkX tests for your custom backend with the following commands:: + + NETWORKX_TEST_BACKEND= + NETWORKX_FALLBACK_TO_NX=True # or False + pytest --pyargs networkx + +Conversions while running tests : + +- Convert NetworkX graphs using ``.convert_from_nx(G, ...)`` into + the backend graph. +- Pass the backend graph objects to the backend implementation of the algorithm. +- Convert the result back to a form expected by NetworkX tests using + ``.convert_to_nx(result, ...)``. + +Notes +~~~~~ + +- Dispatchable algorithms that are not implemented by the backend + will cause a ``pytest.xfail``, giving some indication that not all + tests are running, while avoiding causing an explicit failure. + +- If a backend only partially implements some algorithms, it can define + a ``can_run(name, args, kwargs)`` function that returns True or False + indicating whether it can run the algorithm with the given arguments. + It may also return a string indicating why the algorithm can't be run; + this string may be used in the future to give helpful info to the user. + +- A backend may also define ``should_run(name, args, kwargs)`` that is similar + to ``can_run``, but answers whether the backend *should* be run (converting + if necessary). Like ``can_run``, it receives the original arguments so it + can decide whether it should be run by inspecting the arguments. ``can_run`` + runs before ``should_run``, so ``should_run`` may assume ``can_run`` is True. + If not implemented by the backend, ``can_run`` and ``should_run`` are + assumed to always return True if the backend implements the algorithm. + +- A special ``on_start_tests(items)`` function may be defined by the backend. + It will be called with the list of NetworkX tests discovered. Each item + is a test object that can be marked as xfail if the backend does not support + the test using ``item.add_marker(pytest.mark.xfail(reason=...))``. 
""" + import inspect import os import warnings @@ -103,7 +164,6 @@ class WrappedSparse: import networkx as nx -from ..exception import NetworkXNotImplemented from .decorators import argmap __all__ = ["_dispatchable", "config"] @@ -114,6 +174,27 @@ def _do_nothing(): def _get_backends(group, *, load_and_call=False): + """ + Retrieve NetworkX ``backends`` and ``backend_info`` from the entry points. + + Parameters + ----------- + group : str + The entry_point to be retrieved. + load_and_call : bool, optional + If True, load and call the backend. Defaults to False. + + Returns + -------- + dict + A dictionary mapping backend names to their respective backend objects. + + Notes + ------ + If a backend is defined more than once, a warning is issued. + The `nx-loopback` backend is removed if it exists, as it is only available during testing. + A warning is displayed if an error occurs while loading a backend. + """ items = entry_points(group=group) rv = {} for ep in items: @@ -134,7 +215,6 @@ def _get_backends(group, *, load_and_call=False): ) else: rv[ep.name] = ep - # nx-loopback backend is only available when testing (added in conftest.py) rv.pop("nx-loopback", None) return rv @@ -190,17 +270,19 @@ def _load_backend(backend_name): class _dispatchable: - # Allow any of the following decorator forms: - # - @_dispatchable - # - @_dispatchable() - # - @_dispatchable(name="override_name") - # - @_dispatchable(graphs="graph") - # - @_dispatchable(edge_attrs="weight") - # - @_dispatchable(graphs={"G": 0, "H": 1}, edge_attrs={"weight": "default"}) - - # These class attributes are currently used to allow backends to run networkx tests. - # For example: `PYTHONPATH=. 
pytest --backend graphblas --fallback-to-nx` - # Future work: add configuration to control these + """Allow any of the following decorator forms: + - @_dispatchable + - @_dispatchable() + - @_dispatchable(name="override_name") + - @_dispatchable(graphs="graph") + - @_dispatchable(edge_attrs="weight") + - @_dispatchable(graphs={"G": 0, "H": 1}, edge_attrs={"weight": "default"}) + + These class attributes are currently used to allow backends to run networkx tests. + For example: `PYTHONPATH=. pytest --backend graphblas --fallback-to-nx` + Future work: add configuration to control these. + """ + _is_testing = False _fallback_to_nx = ( os.environ.get("NETWORKX_FALLBACK_TO_NX", "true").strip().lower() == "true" @@ -221,16 +303,24 @@ def __new__( mutates_input=False, returns_graph=False, ): - """Dispatches to a backend algorithm based on input graph types. + """A decorator function that dispatches to ``func``'s backend implementation + based on the input graph types. Parameters ---------- - func : function + func : callable, optional + The function to be decorated. If ``func`` is not provided, returns a + partial object that can be used to decorate a function later. If ``func`` + is provided, returns a new callable object that dispatches to a backend + algorithm based on input graph types. name : str, optional The name of the algorithm to use for dispatching. If not provided, the name of ``func`` will be used. ``name`` is useful to avoid name conflicts, as all dispatched algorithms live in a single namespace. + For example, ``tournament.is_strongly_connected`` had a name conflict + with the standard ``nx.is_strongly_connected``, so we used + ``@_dispatchable(name="tournament_is_strongly_connected")``. graphs : str or dict or None, default "G" If a string, the parameter name of the graph, which must be the first @@ -430,6 +520,10 @@ def __new__( @property def __doc__(self): + """If the cached documentation exists, it is returned. 
+ Otherwise, the documentation is generated using _make_doc() method, + cached, and then returned.""" + if (rv := self._cached_doc) is not None: return rv rv = self._cached_doc = self._make_doc() @@ -437,11 +531,17 @@ def __doc__(self): @__doc__.setter def __doc__(self, val): + """Sets the original documentation to the given value and resets the + cached documentation.""" + self._orig_doc = val self._cached_doc = None @property def __signature__(self): + """Return the signature of the original function, with the addition of + the `backend` and `backend_kwargs` parameters.""" + if self._sig is None: sig = inspect.signature(self.orig_func) # `backend` is now a reserved argument used by dispatching. @@ -475,6 +575,9 @@ def __signature__(self): return self._sig def __call__(self, /, *args, backend=None, **kwargs): + """Returns the result of the original function, or the backend function if + the backend is specified and that backend implements `func`.""" + if not backends: # Fast path if no backends are installed return self.orig_func(*args, **kwargs) @@ -606,7 +709,7 @@ def __call__(self, /, *args, backend=None, **kwargs): # All graphs are backend graphs--no need to convert! 
return getattr(backend, self.name)(*args, **kwargs) # Future work: try to convert and run with other backends in backend_priority - raise NetworkXNotImplemented( + raise nx.NetworkXNotImplemented( f"'{self.name}' not implemented by {graph_backend_name}" ) @@ -899,7 +1002,7 @@ def _convert_and_call(self, backend_name, args, kwargs, *, fallback_to_nx=False) backend_name, args, kwargs ) result = getattr(backend, self.name)(*converted_args, **converted_kwargs) - except (NotImplementedError, NetworkXNotImplemented) as exc: + except (NotImplementedError, nx.NetworkXNotImplemented) as exc: if fallback_to_nx: return self.orig_func(*args, **kwargs) raise @@ -974,7 +1077,7 @@ def _convert_and_call_for_tests( backend_name, args1, kwargs1 ) result = getattr(backend, self.name)(*converted_args, **converted_kwargs) - except (NotImplementedError, NetworkXNotImplemented) as exc: + except (NotImplementedError, nx.NetworkXNotImplemented) as exc: if fallback_to_nx: return self.orig_func(*args2, **kwargs2) import pytest @@ -1155,6 +1258,9 @@ def check_iterator(it): return converted_result def _make_doc(self): + """Generate the backends section at the end for functions having an alternate + backend implementation(s) using the `backend_info` entry-point.""" + if not self.backends: return self._orig_doc lines = [ From 0efae0238ef4e1fbe8b4a5d7d192edae8debc7f0 Mon Sep 17 00:00:00 2001 From: Till Hoffmann Date: Thu, 21 Mar 2024 09:09:47 -0400 Subject: [PATCH 47/47] Add option to hide or show tick labels. (#6018) * Add option to hide or show tick labels. * Update parameter description in docstring. 
--------- Co-authored-by: Ross Barnowski --- networkx/drawing/nx_pylab.py | 98 +++++++++++++++++++--------- networkx/drawing/tests/test_pylab.py | 22 +++++++ 2 files changed, 88 insertions(+), 32 deletions(-) diff --git a/networkx/drawing/nx_pylab.py b/networkx/drawing/nx_pylab.py index 6171c0f7d9ae..7c25f63bf476 100644 --- a/networkx/drawing/nx_pylab.py +++ b/networkx/drawing/nx_pylab.py @@ -243,6 +243,11 @@ def draw_networkx(G, pos=None, arrows=None, with_labels=True, **kwds): label : string, optional Label for graph legend + hide_ticks : bool, optional + Hide ticks of axes. When `True` (the default), ticks and ticklabels + are removed from the axes. To set ticks and tick labels to the pyplot default, + use ``hide_ticks=False``. + kwds : optional keywords See networkx.draw_networkx_nodes(), networkx.draw_networkx_edges(), and networkx.draw_networkx_labels() for a description of optional keywords. @@ -326,6 +331,7 @@ def draw_networkx_nodes( edgecolors=None, label=None, margins=None, + hide_ticks=True, ): """Draw the nodes of the graph G. @@ -390,6 +396,11 @@ def draw_networkx_nodes( be in the range ``[0, 1]``. See :meth:`matplotlib.axes.Axes.margins` for details. The default is `None`, which uses the Matplotlib default. + hide_ticks : bool, optional + Hide ticks of axes. When `True` (the default), ticks and ticklabels + are removed from the axes. To set ticks and tick labels to the pyplot default, + use ``hide_ticks=False``. 
+ Returns ------- matplotlib.collections.PathCollection @@ -450,14 +461,15 @@ def draw_networkx_nodes( edgecolors=edgecolors, label=label, ) - ax.tick_params( - axis="both", - which="both", - bottom=False, - left=False, - labelbottom=False, - labelleft=False, - ) + if hide_ticks: + ax.tick_params( + axis="both", + which="both", + bottom=False, + left=False, + labelbottom=False, + labelleft=False, + ) if margins is not None: if isinstance(margins, Iterable): @@ -682,6 +694,7 @@ def draw_networkx_edges( connectionstyle="arc3", min_source_margin=0, min_target_margin=0, + hide_ticks=True, ): r"""Draw the edges of the graph G. @@ -781,6 +794,11 @@ def draw_networkx_edges( min_target_margin : int (default=0) The minimum margin (gap) at the end of the edge at the target. + hide_ticks : bool, optional + Hide ticks of axes. When `True` (the default), ticks and ticklabels + are removed from the axes. To set ticks and tick labels to the pyplot default, + use ``hide_ticks=False``. + Returns ------- matplotlib.collections.LineCollection or a list of matplotlib.patches.FancyArrowPatch @@ -1013,14 +1031,16 @@ def draw_networkx_edges( corners = (minx - padx, miny - pady), (maxx + padx, maxy + pady) ax.update_datalim(corners) ax.autoscale_view() - ax.tick_params( - axis="both", - which="both", - bottom=False, - left=False, - labelbottom=False, - labelleft=False, - ) + + if hide_ticks: + ax.tick_params( + axis="both", + which="both", + bottom=False, + left=False, + labelbottom=False, + labelleft=False, + ) return edge_viz_obj @@ -1039,6 +1059,7 @@ def draw_networkx_labels( verticalalignment="center", ax=None, clip_on=True, + hide_ticks=True, ): """Draw node labels on the graph G. @@ -1087,6 +1108,11 @@ def draw_networkx_labels( clip_on : bool (default=True) Turn on clipping of node labels at axis boundaries + hide_ticks : bool, optional + Hide ticks of axes. When `True` (the default), ticks and ticklabels + are removed from the axes. 
To set ticks and tick labels to the pyplot default, + use ``hide_ticks=False``. + Returns ------- dict @@ -1138,14 +1164,15 @@ def draw_networkx_labels( ) text_items[n] = t - ax.tick_params( - axis="both", - which="both", - bottom=False, - left=False, - labelbottom=False, - labelleft=False, - ) + if hide_ticks: + ax.tick_params( + axis="both", + which="both", + bottom=False, + left=False, + labelbottom=False, + labelleft=False, + ) return text_items @@ -1169,6 +1196,7 @@ def draw_networkx_edge_labels( node_size=300, nodelist=None, connectionstyle="arc3", + hide_ticks=True, ): """Draw edge labels. @@ -1236,6 +1264,11 @@ def draw_networkx_edge_labels( `matplotlib.patches.FancyArrowPatch` for more info. If Iterable, index indicates i'th edge key of MultiGraph + hide_ticks : bool, optional + Hide ticks of axes. When `True` (the default), ticks and ticklabels + are removed from the axes. To set ticks and tick labels to the pyplot default, + use ``hide_ticks=False``. + Returns ------- dict @@ -1456,14 +1489,15 @@ def draw(self, renderer): ax=ax, ) - ax.tick_params( - axis="both", - which="both", - bottom=False, - left=False, - labelbottom=False, - labelleft=False, - ) + if hide_ticks: + ax.tick_params( + axis="both", + which="both", + bottom=False, + left=False, + labelbottom=False, + labelleft=False, + ) return text_items diff --git a/networkx/drawing/tests/test_pylab.py b/networkx/drawing/tests/test_pylab.py index 38f92964d6e9..984e97b89c07 100644 --- a/networkx/drawing/tests/test_pylab.py +++ b/networkx/drawing/tests/test_pylab.py @@ -845,3 +845,25 @@ def test_no_warning_on_default_draw_arrowstyle(draw_fn): assert len(w) == 0 plt.delaxes(ax) + + +@pytest.mark.parametrize("hide_ticks", [False, True]) +@pytest.mark.parametrize( + "method", + [ + nx.draw_networkx, + nx.draw_networkx_edge_labels, + nx.draw_networkx_edges, + nx.draw_networkx_labels, + nx.draw_networkx_nodes, + ], +) +def test_hide_ticks(method, hide_ticks): + G = nx.path_graph(3) + pos = {n: (n, n) for n 
in G.nodes} + _, ax = plt.subplots() + method(G, pos=pos, ax=ax, hide_ticks=hide_ticks) + for axis in [ax.xaxis, ax.yaxis]: + assert bool(axis.get_ticklabels()) != hide_ticks + + plt.delaxes(ax)