diff --git a/.cruft.json b/.cruft.json index d9f9e24e..97f74b45 100644 --- a/.cruft.json +++ b/.cruft.json @@ -1,6 +1,6 @@ { "template": "https://github.com/cthoyt/cookiecutter-snekpack", - "commit": "12edfcfa5f519467b5d834f0d4e706fb7cf4f065", + "commit": "d6ac604ce8428ddaaae55d372beef2621caed513", "checkout": null, "context": { "cookiecutter": { @@ -8,6 +8,7 @@ "package_name_stylized": "Biomappings", "short_description": "Curated and predicted mappings between biomedical identifiers in different namespaces", "author_name": "Charles Tapley Hoyt", + "author_github": "cthoyt", "author_email": "cthoyt@gmail.com", "github_organization_name": "biopragmatics", "github_repository_name": "biomappings", diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 00000000..439db4c0 --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1,3 @@ +# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/displaying-a-sponsor-button-in-your-repository +github: + - cthoyt diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 00000000..faa7c6b4 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,17 @@ + + +## Summary + + diff --git a/.github/workflows/cruft.yml b/.github/workflows/cruft.yml index 7442c9f5..07aa4d2b 100644 --- a/.github/workflows/cruft.yml +++ b/.github/workflows/cruft.yml @@ -2,9 +2,7 @@ name: Update repository with Cruft -permissions: - contents: write - pull-requests: write +permissions: {} on: workflow_dispatch: @@ -13,6 +11,9 @@ on: jobs: update: + permissions: + contents: write + pull-requests: write runs-on: ubuntu-latest strategy: fail-fast: true diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index f65dca94..4e264877 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -4,6 +4,10 @@ name: Tests +# by default, give the GITHUB_TOKEN no permissions +# See 
https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/controlling-permissions-for-github_token +permissions: {} + on: push: branches: [ master ] @@ -13,11 +17,16 @@ on: jobs: lint: name: Code Quality + permissions: + # give only read-only access to the contents of the repository + # this is the only permission this job requires, so keep it to the least privilege + # i.e., not to issues, discussions, actions, etc. + contents: read runs-on: ubuntu-latest strategy: matrix: python-version: [ "3.12", "3.9" ] - tox-command: ["manifest", "lint", "pyroma", "mypy"] + tox-command: ["lint", "pyroma", "mypy"] steps: - uses: actions/checkout@v4 - name: "Install uv" @@ -31,6 +40,8 @@ jobs: docs: name: Documentation + permissions: + contents: read runs-on: ubuntu-latest strategy: matrix: @@ -56,6 +67,8 @@ jobs: run: uvx -p ${{ matrix.python-version }} --with tox-uv tox -e docs-test tests: name: Tests + permissions: + contents: read runs-on: ${{ matrix.os }} strategy: matrix: diff --git a/README.md b/README.md index 2c0c9a15..36bf8039 100644 --- a/README.md +++ b/README.md @@ -376,7 +376,7 @@ You only have to do the following steps once. #### Configuring your machine's connection to PyPI -You have to do the following steps once per machine. +You have to do the following steps once per machine. ```console $ uv tool install keyring diff --git a/pyproject.toml b/pyproject.toml index 2bd7a724..5f1ce15e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,8 @@ -# See https://setuptools.readthedocs.io/en/latest/build_meta.html [build-system] -requires = ["setuptools", "wheel"] -build-backend = "setuptools.build_meta" +requires = ["uv>=0.5.10,<0.6.0"] +# The uv backend entered preview mode in https://github.com/astral-sh/uv/pull/8886/files +# with the 0.5.0 release. See also https://github.com/astral-sh/uv/issues/3957 for tracking. 
+build-backend = "uv" [project] name = "biomappings" @@ -26,12 +27,15 @@ classifiers = [ "Framework :: Pytest", "Framework :: tox", "Framework :: Sphinx", + "Natural Language :: English", "Programming Language :: Python", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3 :: Only", + "Typing :: Typed", "Topic :: Scientific/Engineering :: Chemistry", "Topic :: Scientific/Engineering :: Bio-Informatics", ] @@ -45,10 +49,11 @@ keywords = [ "nlp", ] -# License Information. This can be any valid SPDX identifiers that can be resolved -# with URLs like https://spdx.org/licenses/MIT -# See https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#license -license = { file = "LICENSE" } +# License Information. +# See PEP-639 at https://peps.python.org/pep-0639/#add-license-files-key +license-files = [ + "LICENSE", +] requires-python = ">=3.9" dependencies = [ @@ -64,7 +69,7 @@ dependencies = [ [project.optional-dependencies] tests = [ "pytest", - "coverage", + "coverage[toml]", ] docs = [ "sphinx>=8", @@ -99,28 +104,17 @@ exports = [ ] # See https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#urls +# and also https://packaging.python.org/en/latest/specifications/well-known-project-urls/ [project.urls] "Bug Tracker" = "https://github.com/biopragmatics/biomappings/issues" Homepage = "https://github.com/biopragmatics/biomappings" Repository = "https://github.com/biopragmatics/biomappings.git" Documentation = "https://biomappings.readthedocs.io" - -[tool.setuptools] -package-dir = { "" = "src" } - -[tool.setuptools.packages.find] -# this implicitly sets `packages = ":find"` -where = ["src"] # list of folders that contain the packages (["."] by default) - -# See https://setuptools.pypa.io/en/latest/userguide/datafiles.html -[tool.setuptools.package-data] 
-"*" = ["*.*"] - +Funding = "https://github.com/sponsors/cthoyt" [project.scripts] biomappings = "biomappings.cli:main" - [tool.cruft] skip = [ "**/__init__.py", diff --git a/scripts/generate_chebi_mesh_mappings.py b/scripts/generate_chebi_mesh_mappings.py index 41d73601..e3956ae5 100644 --- a/scripts/generate_chebi_mesh_mappings.py +++ b/scripts/generate_chebi_mesh_mappings.py @@ -26,7 +26,7 @@ continue mesh_chebi_simple.append(mesh_chebi_pair) - print("Found %d CHEBI-MESH mappings." % len(mesh_chebi_simple)) + print(f"Found {len(mesh_chebi_simple)} CHEBI-MESH mappings.") predictions = [] n_redundant = 0 @@ -53,7 +53,7 @@ predictions.append(pred) print( - "A total of %d mappings could be indirectly inferred from" - "INDRA ontology xrefs" % len(n_redundant) + f"A total of {n_redundant} mappings could be indirectly inferred from " + "INDRA ontology xrefs" ) append_prediction_tuples(predictions, deduplicate=True, sort=True) diff --git a/scripts/generate_cl_mesh_mappings.py b/scripts/generate_cl_mesh_mappings.py index 4322a66d..cf62a8f1 100644 --- a/scripts/generate_cl_mesh_mappings.py +++ b/scripts/generate_cl_mesh_mappings.py @@ -49,7 +49,7 @@ mappings[node] = mesh_id -print("Found %d CL->MESH mappings." % len(mappings)) +print(f"Found {len(mappings)} CL->MESH mappings.") predictions = [] for cl_id, mesh_id in mappings.items(): diff --git a/scripts/generate_doid_mesh_mappings.py b/scripts/generate_doid_mesh_mappings.py index cd634566..1eabe68d 100644 --- a/scripts/generate_doid_mesh_mappings.py +++ b/scripts/generate_doid_mesh_mappings.py @@ -57,7 +57,7 @@ mappings[node] = matches[0].term.id -print("Found %d DOID->MESH mappings." 
% len(mappings)) +print(f"Found {len(mappings)} DOID->MESH mappings.") # We makes sure that (i) the node is not already mappable to MESH and that # (ii) there isn't some other node that was not already mapped to the @@ -73,7 +73,7 @@ cnt = Counter(mappings.values()) mappings = {k: v for k, v in mappings.items() if cnt[v] == 1} -print("Found %d filtered DOID->MESH mappings." % len(mappings)) +print(f"Found {len(mappings)} filtered DOID->MESH mappings.") # We can now add the predictions predictions = [] diff --git a/scripts/generate_hp_mesh_mappings.py b/scripts/generate_hp_mesh_mappings.py index 67d1a8eb..95de2d00 100644 --- a/scripts/generate_hp_mesh_mappings.py +++ b/scripts/generate_hp_mesh_mappings.py @@ -66,7 +66,7 @@ if grounding[0] == "MESH": mappings[node] = matches[0].term.id -print("Found %d HP->MESH mappings." % len(mappings)) +print(f"Found {len(mappings)} HP->MESH mappings.") # We makes sure that (i) the node is not already mappable to MESH and that # (ii) there isn't some other node that was not already mapped to the @@ -82,7 +82,7 @@ cnt = Counter(mappings.values()) mappings = {k: v for k, v in mappings.items() if cnt[v] == 1} -print("Found %d filtered HP->MESH mappings." % len(mappings)) +print(f"Found {len(mappings)} filtered HP->MESH mappings.") # We can now add the predictions predictions = [] diff --git a/scripts/generate_mondo_mesh_mappings.py b/scripts/generate_mondo_mesh_mappings.py index df22d23e..10bba337 100644 --- a/scripts/generate_mondo_mesh_mappings.py +++ b/scripts/generate_mondo_mesh_mappings.py @@ -42,7 +42,7 @@ mappings[node] = matches[0].term.id -print("Found %d MONDO->MESH mappings." % len(mappings)) +print(f"Found {len(mappings)} MONDO->MESH mappings.") mappings = { k: v @@ -54,7 +54,7 @@ mappings = {k: v for k, v in mappings.items() if cnt[v] == 1} -print("Found %d MONDO->MESH mappings." 
% len(mappings)) +print(f"Found {len(mappings)} MONDO->MESH mappings.") predictions = [] for mondo_id, mesh_id in mappings.items(): diff --git a/scripts/generate_uberon_mesh_mappings.py b/scripts/generate_uberon_mesh_mappings.py index b795ac3f..34b34014 100644 --- a/scripts/generate_uberon_mesh_mappings.py +++ b/scripts/generate_uberon_mesh_mappings.py @@ -19,7 +19,7 @@ if matches and matches[0].term.db == "MESH": mappings[node] = matches[0].term.id -print("Found %d UBERON->MESH mappings." % len(mappings)) +print(f"Found {len(mappings)} UBERON->MESH mappings.") predictions = [] for uberon_id, mesh_id in mappings.items(): diff --git a/src/biomappings/gilda_utils.py b/src/biomappings/gilda_utils.py index d3f65bb0..44245606 100644 --- a/src/biomappings/gilda_utils.py +++ b/src/biomappings/gilda_utils.py @@ -16,10 +16,10 @@ __all__ = [ "append_gilda_predictions", - "iter_prediction_tuples", "filter_custom", "filter_existing_xrefs", "has_mapping", + "iter_prediction_tuples", ] logger = logging.getLogger(__name__) diff --git a/src/biomappings/paper_analysis.py b/src/biomappings/paper_analysis.py index 1414129a..bed5fc75 100644 --- a/src/biomappings/paper_analysis.py +++ b/src/biomappings/paper_analysis.py @@ -18,9 +18,9 @@ __all__ = [ "Result", - "get_primary_mappings", - "get_obo_mappings", "get_non_obo_mappings", + "get_obo_mappings", + "get_primary_mappings", "index_mappings", ] diff --git a/src/biomappings/resources/__init__.py b/src/biomappings/resources/__init__.py index 2ca94efe..8ac87d27 100644 --- a/src/biomappings/resources/__init__.py +++ b/src/biomappings/resources/__init__.py @@ -15,31 +15,31 @@ from biomappings.utils import OVERRIDE_MIRIAM, RESOURCE_PATH, get_canonical_tuple __all__ = [ - "MappingTuple", "MAPPINGS_HEADER", - "PredictionTuple", "PREDICTIONS_HEADER", + "MappingTuple", "Mappings", - "load_mappings", - "load_mappings_subset", - "append_true_mappings", - "append_true_mapping_tuples", - "write_true_mappings", - "load_false_mappings", + 
"PredictionTuple", "append_false_mappings", - "write_false_mappings", - "load_unsure", - "append_unsure_mappings", - "write_unsure_mappings", - "load_predictions", - "append_predictions", "append_prediction_tuples", - "write_predictions", - "remove_mappings", - "load_curators", + "append_predictions", + "append_true_mapping_tuples", + "append_true_mappings", + "append_unsure_mappings", "filter_predictions", "get_curated_filter", + "load_curators", + "load_false_mappings", + "load_mappings", + "load_mappings_subset", + "load_predictions", + "load_unsure", "prediction_tuples_from_semra", + "remove_mappings", + "write_false_mappings", + "write_predictions", + "write_true_mappings", + "write_unsure_mappings", ] logger = logging.getLogger(__name__) diff --git a/src/biomappings/utils.py b/src/biomappings/utils.py index 3b1040fc..3c07bf9e 100644 --- a/src/biomappings/utils.py +++ b/src/biomappings/utils.py @@ -10,17 +10,17 @@ import bioregistry __all__ = [ - "get_git_hash", - "get_script_url", - "get_canonical_tuple", - "UnregisteredPrefix", - "UnstandardizedPrefix", + "CMapping", "InvalidIdentifier", - "InvalidNormIdentifier", "InvalidIdentifierPattern", + "InvalidNormIdentifier", + "UnregisteredPrefix", + "UnstandardizedPrefix", "check_valid_prefix_id", + "get_canonical_tuple", "get_curie", - "CMapping", + "get_git_hash", + "get_script_url", ] HERE = Path(__file__).parent.resolve() diff --git a/tox.ini b/tox.ini index 9571347d..23f1ef05 100644 --- a/tox.ini +++ b/tox.ini @@ -16,7 +16,6 @@ envlist = format # format-docs # Code quality assessment - manifest pyroma lint mypy @@ -28,7 +27,12 @@ envlist = py doctests # always keep coverage-report last - # coverage-report + coverage-report + +[testenv:.pkg] +# this special environment configures the build that tox does itself +set_env = + UV_PREVIEW=1 [testenv] description = Run unit and integration tests. 
@@ -41,6 +45,10 @@ commands = extras = # See the [project.optional-dependencies] entry in pyproject.toml for "tests" tests +set_env = + # this setting gets inherited into all environments, meaning + # that things that call uv commands don't require a --preview + UV_PREVIEW=1 [testenv:update] commands = @@ -110,12 +118,6 @@ skip_install = true commands = rstfmt docs/source/ -[testenv:manifest] -deps = check-manifest -skip_install = true -commands = check-manifest -description = Check that the MANIFEST.in is written properly and give feedback on how to fix it. - [testenv:lint] description = Check code quality using ruff and other tools. @@ -189,11 +191,9 @@ allowlist_externals = mkdir [testenv:coverage-report] -# TODO this is broken -deps = coverage +deps = coverage[toml] skip_install = true commands = - coverage combine coverage report #################### @@ -220,7 +220,6 @@ deps = skip_install = true deps = uv - setuptools commands = uv build --sdist --wheel --no-build-isolation @@ -240,7 +239,7 @@ commands = # 7. Add your token to keyring with `keyring set https://upload.pypi.org/legacy/ __token__` [testenv:release] -description = Release the code to PyPI so users can pip install it +description = Release the code to PyPI so users can pip install it, using credentials from keyring skip_install = true deps = {[testenv:build]deps} @@ -250,6 +249,19 @@ commands = {[testenv:build]commands} uv publish --username __token__ --keyring-provider subprocess --publish-url https://upload.pypi.org/legacy/ +[testenv:release-via-env] +description = Release the code to PyPI so users can pip install it, using credentials from the environment. +skip_install = true +deps = + {[testenv:build]deps} + uv +commands = + {[testenv:build]commands} + uv publish --publish-url https://upload.pypi.org/legacy/ +passenv = + UV_PUBLISH_USERNAME + UV_PUBLISH_PASSWORD + [testenv:finish] description = Run a workflow that removes -dev from the version, creates a tagged release on GitHub,