diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 97bb856..d0182dd 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -20,4 +20,4 @@ jobs:
       - uses: actions/setup-python@v5
         with:
           python-version: "3.10"
-      - uses: pre-commit/action@v3.0.0
+      - uses: pre-commit/action@v3.0.1
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index e4525c5..c0f8cef 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -16,7 +16,7 @@ default_language_version:
   python: python3
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.5.0
+    rev: v4.6.0
     hooks:
       - id: check-added-large-files
       - id: check-case-conflict
@@ -33,13 +33,13 @@ repos:
       - id: name-tests-test
        args: ["--pytest-test-first"]
   - repo: https://github.com/abravalheri/validate-pyproject
-    rev: v0.15
+    rev: v0.18
     hooks:
       - id: validate-pyproject
        name: Validate pyproject.toml
   # I don't yet trust ruff to do what autoflake does
   - repo: https://github.com/PyCQA/autoflake
-    rev: v2.2.1
+    rev: v2.3.1
     hooks:
       - id: autoflake
        args: [--in-place]
@@ -48,33 +48,34 @@ repos:
     hooks:
       - id: isort
   - repo: https://github.com/asottile/pyupgrade
-    rev: v3.15.0
+    rev: v3.16.0
     hooks:
       - id: pyupgrade
        args: [--py310-plus]
   - repo: https://github.com/MarcoGorelli/auto-walrus
-    rev: v0.2.2
+    rev: 0.3.4
     hooks:
       - id: auto-walrus
+        additional_dependencies: [tomli]
        args: [--line-length, "100"]
   - repo: https://github.com/psf/black
-    rev: 23.12.1
+    rev: 24.4.2
     hooks:
       - id: black
      # - id: black-jupyter
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.1.9
+    rev: v0.5.0
     hooks:
       - id: ruff
        args: [--fix-only, --show-fixes]
   - repo: https://github.com/PyCQA/flake8
-    rev: 6.1.0
+    rev: 7.1.0
     hooks:
       - id: flake8
        additional_dependencies: &flake8_dependencies
          # These versions need updated manually
-         - flake8==6.1.0
-         - flake8-bugbear==23.12.2
+         - flake8==7.1.0
+         - flake8-bugbear==24.2.6
          - flake8-simplify==0.21.0
   - repo: https://github.com/asottile/yesqa
     rev: v1.5.0
@@ -82,14 +83,14 @@ repos:
       - id: yesqa
        additional_dependencies: *flake8_dependencies
   - repo: https://github.com/codespell-project/codespell
-    rev: v2.2.6
+    rev: v2.3.0
     hooks:
       - id: codespell
        types_or: [python, rst, markdown]
        additional_dependencies: [tomli]
        files: ^(graphblas_algorithms|docs)/
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.1.9
+    rev: v0.5.0
     hooks:
       - id: ruff
   # `pyroma` may help keep our package standards up to date if best practices change.
@@ -100,6 +101,6 @@ repos:
       - id: pyroma
        args: [-n, "10", .]
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.5.0
+    rev: v4.6.0
     hooks:
       - id: no-commit-to-branch  # no commit directly to main
diff --git a/README.md b/README.md
index ed66df3..b6c7e1b 100644
--- a/README.md
+++ b/README.md
@@ -5,7 +5,7 @@
 [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/graphblas-algorithms)](https://pypi.python.org/pypi/graphblas-algorithms/)
 [![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://github.com/python-graphblas/graphblas-algorithms/blob/main/LICENSE)
-[![Tests](https://github.com/python-graphblas/graphblas-algorithms/workflows/Tests/badge.svg?branch=main)](https://github.com/python-graphblas/graphblas-algorithms/actions)
+[![Tests](https://github.com/python-graphblas/graphblas-algorithms/actions/workflows/test.yml/badge.svg?branch=main)](https://github.com/python-graphblas/graphblas-algorithms/actions)
 [![Coverage](https://codecov.io/gh/python-graphblas/graphblas-algorithms/branch/main/graph/badge.svg)](https://codecov.io/gh/python-graphblas/graphblas-algorithms)
 [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.7329185.svg)](https://doi.org/10.5281/zenodo.7329185)
 [![Discord](https://img.shields.io/badge/Chat-Discord-blue)](https://discord.com/invite/vur45CbwMz)
@@ -91,7 +91,7 @@ T5 = nx.k_truss(G2, 5)
 ```

 `G2` is not a `nx.Graph`, but it does have an attribute
-`__networkx_plugin__ = "graphblas"`. This tells NetworkX to
+`__networkx_backend__ = "graphblas"`. This tells NetworkX to
 dispatch the k_truss call to graphblas-algorithms. This link
 connection exists because graphblas-algorithms registers itself
 as a "networkx.plugin" entry point.
diff --git a/graphblas_algorithms/algorithms/_bfs.py b/graphblas_algorithms/algorithms/_bfs.py
index 8189aae..996aee0 100644
--- a/graphblas_algorithms/algorithms/_bfs.py
+++ b/graphblas_algorithms/algorithms/_bfs.py
@@ -1,4 +1,4 @@
-"""BFS routines used by other algorithms"""
+"""BFS routines used by other algorithms."""

 import numpy as np
 from graphblas import Matrix, Vector, binary, indexunary, replace, semiring, unary
diff --git a/graphblas_algorithms/algorithms/_helpers.py b/graphblas_algorithms/algorithms/_helpers.py
index 2c0a820..6b6f2e8 100644
--- a/graphblas_algorithms/algorithms/_helpers.py
+++ b/graphblas_algorithms/algorithms/_helpers.py
@@ -19,7 +19,7 @@ def normalize(x, how):
 def is_converged(xprev, x, tol):
-    """Check convergence, L1 norm: err = sum(abs(xprev - x)); err < N * tol
+    """Check convergence, L1 norm: ``err = sum(abs(xprev - x)); err < N * tol``.

     This modifies `xprev`.
     """
diff --git a/graphblas_algorithms/algorithms/shortest_paths/weighted.py b/graphblas_algorithms/algorithms/shortest_paths/weighted.py
index a83a060..591ca6a 100644
--- a/graphblas_algorithms/algorithms/shortest_paths/weighted.py
+++ b/graphblas_algorithms/algorithms/shortest_paths/weighted.py
@@ -116,13 +116,14 @@ def bellman_ford_path_length(G, source, target):
 def bellman_ford_path_lengths(G, nodes=None, *, expand_output=False):
-    """Extra parameter: expand_output
+    """Extra parameter: expand_output.

     Parameters
     ----------
     expand_output : bool, default False
         When False, the returned Matrix has one row per node in nodes.
         When True, the returned Matrix has the same shape as the input Matrix.
+
     """
     # Same algorithms as in `single_source_bellman_ford_path_length`, but with
     # `Cur` as a Matrix with each row corresponding to a source node.
diff --git a/graphblas_algorithms/classes/_utils.py b/graphblas_algorithms/classes/_utils.py
index ecf66d9..6638f9f 100644
--- a/graphblas_algorithms/classes/_utils.py
+++ b/graphblas_algorithms/classes/_utils.py
@@ -177,7 +177,7 @@ def matrix_to_vectornodemap(self, A):
 def matrix_to_dicts(self, A, *, use_row_index=False, use_column_index=False, values_are_keys=False):
-    """Convert a Matrix to a dict of dicts of the form ``{row: {col: val}}``
+    """Convert a Matrix to a dict of dicts of the form ``{row: {col: val}}``.

     Use ``use_row_index=True`` to return the row index as keys in the dict,
     and likewise for `use_column_index=True``.
@@ -256,7 +256,7 @@ def _cacheit(self, key, func, *args, **kwargs):
 def renumber_key_to_id(self, indices):
-    """Create `key_to_id` for e.g. a subgraph with node ids from `indices`"""
+    """Create `key_to_id` for e.g. a subgraph with node ids from `indices`."""
     id_to_key = self.id_to_key
     return {id_to_key[index]: i for i, index in enumerate(indices)}
     # Alternative (about the same performance)
diff --git a/graphblas_algorithms/classes/digraph.py b/graphblas_algorithms/classes/digraph.py
index 1e9fe5f..5f7c89a 100644
--- a/graphblas_algorithms/classes/digraph.py
+++ b/graphblas_algorithms/classes/digraph.py
@@ -22,7 +22,7 @@
 def get_AT(G, mask=None):
-    """A.T"""
+    """``A.T``."""
     A = G._A
     cache = G._cache
     if "AT" not in cache:
@@ -31,7 +31,7 @@
 def get_Up(G, mask=None):
-    """select.triu(A)"""
+    """``select.triu(A)``."""
     A = G._A
     cache = G._cache
     if "U+" not in cache:
@@ -50,7 +50,7 @@
 def get_Lp(G, mask=None):
-    """select.tril(A)"""
+    """``select.tril(A)``."""
     A = G._A
     cache = G._cache
     if "L+" not in cache:
@@ -69,7 +69,7 @@
 def get_Um(G, mask=None):
-    """select.triu(A, 1)"""
+    """``select.triu(A, 1)``."""
     A = G._A
     cache = G._cache
     if "U-" not in cache:
@@ -93,7 +93,7 @@
 def get_Lm(G, mask=None):
-    """select.tril(A, -1)"""
+    """``select.tril(A, -1)``."""
     A = G._A
     cache = G._cache
     if "L-" not in cache:
@@ -117,7 +117,7 @@
 def get_recip_degreesp(G, mask=None):
-    """pair(A & A.T).reduce_rowwise()"""
+    """``pair(A & A.T).reduce_rowwise()``."""
     A = G._A
     cache = G._cache
     AT = cache.get("AT", A.T)
@@ -159,7 +159,7 @@
 def get_recip_degreesm(G, mask=None):
-    """C = select.offdiag(A) ; pair(C & C.T).reduce_rowwise()"""
+    """``C = select.offdiag(A) ; pair(C & C.T).reduce_rowwise()``."""
     A = G._A
     cache = G._cache
     if "AT" in cache:
@@ -236,7 +236,7 @@
 def get_total_degreesp(G, mask=None):
-    """A.reduce_rowwise(agg.count) + A.reduce_columnwise(agg.count)"""
+    """``A.reduce_rowwise(agg.count) + A.reduce_columnwise(agg.count)``."""
     cache = G._cache
     if mask is not None:
         if "total_degrees+" in cache:
@@ -266,7 +266,7 @@
 def get_total_degreesm(G, mask=None):
-    """C = select.offdiag(A) ; C.reduce_rowwise(agg.count) + C.reduce_columnwise(agg.count)"""
+    """``C = select.offdiag(A) ; C.reduce_rowwise(agg.count) + C.reduce_columnwise(agg.count)``."""
     cache = G._cache
     if mask is not None:
         if "total_degrees-" in cache:
@@ -296,7 +296,7 @@
 def get_total_recipp(G, mask=None):
-    """pair(A & A.T).reduce_scalar()"""
+    """``pair(A & A.T).reduce_scalar()``."""
     A = G._A
     cache = G._cache
     if "total_recip+" not in cache:
@@ -315,7 +315,7 @@
 def get_total_recipm(G, mask=None):
-    """C = select.offdiag(A) ; pair(C & C.T).reduce_scalar()"""
+    """``C = select.offdiag(A) ; pair(C & C.T).reduce_scalar()``."""
     cache = G._cache
     if "total_recip-" not in cache:
         if "total_recip+" in cache and cache.get("has_self_edges") is False:
@@ -330,7 +330,7 @@
 def has_self_edges(G, mask=None):
-    """A.diag().nvals > 0"""
+    """``A.diag().nvals > 0``."""
     A = G._A
     cache = G._cache
     if "has_self_edges" not in cache:
diff --git a/graphblas_algorithms/classes/graph.py b/graphblas_algorithms/classes/graph.py
index f3e2239..1c5e429 100644
--- a/graphblas_algorithms/classes/graph.py
+++ b/graphblas_algorithms/classes/graph.py
@@ -10,19 +10,19 @@
 def get_A(G, mask=None):
-    """A"""
+    """``A``."""
     return G._A


 def get_AT(G, mask=None):
-    """A.T"""
+    """``A.T``."""
     A = G._A
     G._cache["AT"] = A
     return A


 def get_offdiag(G, mask=None):
-    """select.offdiag(A)"""
+    """``select.offdiag(A)``."""
     A = G._A
     cache = G._cache
     if "offdiag" not in cache:
@@ -38,7 +38,7 @@ def get_offdiag(G, mask=None):
 def get_Up(G, mask=None):
-    """select.triu(A)"""
+    """``select.triu(A)``."""
     A = G._A
     cache = G._cache
     if "U+" not in cache:
@@ -54,7 +54,7 @@
 def get_Lp(G, mask=None):
-    """select.tril(A)"""
+    """``select.tril(A)``."""
     A = G._A
     cache = G._cache
     if "L+" not in cache:
@@ -70,7 +70,7 @@
 def get_Um(G, mask=None):
-    """select.triu(A, 1)"""
+    """``select.triu(A, 1)``."""
     A = G._A
     cache = G._cache
     if "U-" not in cache:
@@ -91,7 +91,7 @@
 def get_Lm(G, mask=None):
-    """select.tril(A, -1)"""
+    """``select.tril(A, -1)``."""
     A = G._A
     cache = G._cache
     if "L-" not in cache:
@@ -112,7 +112,7 @@
 def get_diag(G, mask=None):
-    """A.diag()"""
+    """``A.diag()``."""
     A = G._A
     cache = G._cache
     if "diag" not in cache:
@@ -193,7 +193,7 @@ def has_negative_edgesm(G, mask=None):
 def has_self_edges(G, mask=None):
-    """A.diag().nvals > 0"""
+    """``A.diag().nvals > 0``."""
     A = G._A
     cache = G._cache
     if "has_self_edges" not in cache:
diff --git a/graphblas_algorithms/conftest.py b/graphblas_algorithms/conftest.py
index 8d42a7d..8db8040 100644
--- a/graphblas_algorithms/conftest.py
+++ b/graphblas_algorithms/conftest.py
@@ -3,7 +3,7 @@
 @pytest.fixture(scope="session", autouse=True)
 def ic():
-    """Make `ic` available everywhere during testing for easier debugging"""
+    """Make `ic` available everywhere during testing for easier debugging."""
     try:
         import icecream
     except ImportError:
diff --git a/graphblas_algorithms/nxapi/_utils.py b/graphblas_algorithms/nxapi/_utils.py
index 0bb9617..76cc9e6 100644
--- a/graphblas_algorithms/nxapi/_utils.py
+++ b/graphblas_algorithms/nxapi/_utils.py
@@ -88,7 +88,7 @@ def normalize_chunksize(chunksize, itemsize=1, N=None):
 def partition(chunksize, L, *, evenly=True):
-    """Partition a list into chunks"""
+    """Partition a list into chunks."""
     N = len(L)
     if N == 0:
         return
@@ -109,7 +109,7 @@
 def split_evenly(k, L):
-    """Split a list into approximately-equal parts"""
+    """Split a list into approximately-equal parts."""
     N = len(L)
     if N == 0:
         return
diff --git a/graphblas_algorithms/tests/test_match_nx.py b/graphblas_algorithms/tests/test_match_nx.py
index 1924ff7..f985fd3 100644
--- a/graphblas_algorithms/tests/test_match_nx.py
+++ b/graphblas_algorithms/tests/test_match_nx.py
@@ -9,6 +9,7 @@
 For now, though, let's try to match and stay up-to-date with NetworkX!
""" + import sys from collections import namedtuple from pathlib import Path @@ -49,7 +50,7 @@ def isdispatched(func): def dispatchname(func): - """The dispatched name of the dispatchable NetworkX function""" + """The dispatched name of the dispatchable NetworkX function.""" # Haha, there should be a better way to get this if not isdispatched(func): raise ValueError(f"Function is not dispatched in NetworkX: {func.__name__}") diff --git a/pyproject.toml b/pyproject.toml index b1625c6..110d51f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,9 +61,14 @@ dependencies = [ "python-graphblas >=2023.1.0", ] +# nx < 3.2 [project.entry-points."networkx.plugins"] graphblas = "graphblas_algorithms.interface:Dispatcher" +[project.entry-points."networkx.plugins_info"] +graphblas = "_nx_graphblas:get_info" + +# nx >= 3.2 [project.entry-points."networkx.backends"] graphblas = "graphblas_algorithms.interface:Dispatcher" @@ -177,8 +182,10 @@ exclude_lines = [ # https://github.com/charliermarsh/ruff/ line-length = 100 target-version = "py310" +[tool.ruff.lint] unfixable = [ - "F841" # unused-variable (Note: can leave useless expression) + "F841", # unused-variable (Note: can leave useless expression) + "B905", # zip-without-explicit-strict (Note: prefer `zip(x, y, strict=True)`) ] select = [ "ALL", @@ -194,12 +201,16 @@ ignore = [ "D103", # Missing docstring in public function "D104", # Missing docstring in public package "D105", # Missing docstring in magic method - # "D107", # Missing docstring in `__init__` + "D107", # Missing docstring in `__init__` "D401", # First line of docstring should be in imperative mood: - # "D417", # Missing argument description in the docstring: + "D417", # D417 Missing argument description in the docstring for ...: ... "PLE0605", # Invalid format for `__all__`, must be `tuple` or `list` (Note: broken in v0.0.237) # Maybe consider + "E721", # Use `is` and `is not` for type comparisons, or `isinstance()` for isinstance checks + "FURB177", # Prefer `Path.cwd()` over `Path().resolve()` for current-directory lookups + "S113", # Probable use of requests call without timeout + "SIM103", # Return the condition `bool(mask.reduce(monoid.lor))` directly # "SIM300", # Yoda conditions are discouraged, use ... instead (Note: we're not this picky) # "SIM401", # Use dict.get ... instead of if-else-block (Note: if-else better for coverage and sometimes clearer) # "TRY004", # Prefer `TypeError` exception for invalid type (Note: good advice, but not worth the nuisance) @@ -209,6 +220,7 @@ ignore = [ # Intentionally ignored "COM812", # Trailing comma missing "D203", # 1 blank line required before class docstring (Note: conflicts with D211, which is preferred) + "D213", # (Note: conflicts with D212, which is preferred) "D400", # First line should end with a period (Note: prefer D415, which also allows "?" and "!") "F403", # `from .classes import *` used; unable to detect undefined names (Note: used to match networkx) "N802", # Function name ... should be lowercase @@ -232,6 +244,7 @@ ignore = [ "SIM105", # Use contextlib.suppress(...) instead of try-except-pass (Note: try-except-pass is much faster) "SIM108", # Use ternary operator ... instead of if-else-block (Note: if-else better for coverage and sometimes clearer) "TRY003", # Avoid specifying long messages outside the exception class (Note: why?) 
+ "UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)` (Note: using `|` is slower atm) "FIX001", "FIX002", "FIX003", "FIX004", # flake8-fixme (like flake8-todos) # Ignored categories @@ -253,19 +266,19 @@ ignore = [ "PD", # pandas-vet (Intended for scripts that use pandas, not libraries) ] -[tool.ruff.per-file-ignores] +[tool.ruff.lint.per-file-ignores] "__init__.py" = ["F401"] # Allow unused imports (w/o defining `__all__`) "graphblas_algorithms/**/tests/*py" = ["S101", "T201", "D103", "D100"] # Allow assert, print, and no docstring "graphblas_algorithms/interface.py" = ["PIE794"] # Allow us to use `mod = nxapi.` repeatedly "graphblas_algorithms/nxapi/exception.py" = ["F401"] # Allow unused imports (w/o defining `__all__`) "scripts/*.py" = ["INP001", "S101", "T201"] # Not a package, allow assert, allow print -[tool.ruff.flake8-builtins] +[tool.ruff.lint.flake8-builtins] builtins-ignorelist = ["copyright"] -[tool.ruff.flake8-pytest-style] +[tool.ruff.lint.flake8-pytest-style] fixture-parentheses = false mark-parentheses = false -[tool.ruff.pydocstyle] +[tool.ruff.lint.pydocstyle] convention = "numpy" diff --git a/scripts/bench.py b/scripts/bench.py index 3b3f4dc..2e432b8 100755 --- a/scripts/bench.py +++ b/scripts/bench.py @@ -67,7 +67,7 @@ def readfile(filepath, is_symmetric, backend): def best_units(num): - """Returns scale factor and prefix such that 1 <= num*scale < 1000""" + """Returns scale factor and prefix such that ``1 <= num*scale < 1000``.""" if num < 1e-12: return 1e15, "f" if num < 1e-9: