
Merge branch 'master' into readme-updates-1
cthoyt authored Dec 18, 2024
2 parents 6b3da3f + 1a03dc7 commit d1e0993
Showing 18 changed files with 119 additions and 78 deletions.
3 changes: 2 additions & 1 deletion .cruft.json
@@ -1,13 +1,14 @@
{
"template": "https://github.com/cthoyt/cookiecutter-snekpack",
"commit": "12edfcfa5f519467b5d834f0d4e706fb7cf4f065",
"commit": "d6ac604ce8428ddaaae55d372beef2621caed513",
"checkout": null,
"context": {
"cookiecutter": {
"package_name": "biomappings",
"package_name_stylized": "Biomappings",
"short_description": "Curated and predicted mappings between biomedical identifiers in different namespaces",
"author_name": "Charles Tapley Hoyt",
"author_github": "cthoyt",
"author_email": "[email protected]",
"github_organization_name": "biopragmatics",
"github_repository_name": "biomappings",
3 changes: 3 additions & 0 deletions .github/FUNDING.yml
@@ -0,0 +1,3 @@
# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/displaying-a-sponsor-button-in-your-repository
github:
- cthoyt
17 changes: 17 additions & 0 deletions .github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,17 @@
<!--
Thanks for contributing to `biomappings`.
To help us out with reviewing, please consider the following:
- Does this pull request include a summary of the change? (See below.)
- Does this pull request include a descriptive title?
- Does this pull request include references to any relevant issues?
Caution: the maintainers often take an active role in pull requests,
and may push to your branch. Therefore, you should always sync your
local copy of the repository with the remote before continuing your
work.
-->

## Summary

<!-- What's the purpose of the change? What does it do, and why? -->
7 changes: 4 additions & 3 deletions .github/workflows/cruft.yml
@@ -2,9 +2,7 @@

name: Update repository with Cruft

permissions:
contents: write
pull-requests: write
permissions: {}

on:
workflow_dispatch:
@@ -13,6 +11,9 @@ on:

jobs:
update:
permissions:
contents: write
pull-requests: write
runs-on: ubuntu-latest
strategy:
fail-fast: true
15 changes: 14 additions & 1 deletion .github/workflows/tests.yml
@@ -4,6 +4,10 @@

name: Tests

# by default, give the GITHUB_TOKEN no permissions
# See https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/controlling-permissions-for-github_token
permissions: {}

on:
push:
branches: [ master ]
@@ -13,11 +17,16 @@ on:
jobs:
lint:
name: Code Quality
permissions:
# give only read-only access to the contents of the repository
# this is the only permission this job requires, so keep it to the least privilege
# i.e., not to issues, discussions, actions, etc.
contents: read
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [ "3.12", "3.9" ]
tox-command: ["manifest", "lint", "pyroma", "mypy"]
tox-command: ["lint", "pyroma", "mypy"]
steps:
- uses: actions/checkout@v4
- name: "Install uv"
@@ -31,6 +40,8 @@ jobs:
docs:
name: Documentation
permissions:
contents: read
runs-on: ubuntu-latest
strategy:
matrix:
@@ -56,6 +67,8 @@
run: uvx -p ${{ matrix.python-version }} --with tox-uv tox -e docs-test
tests:
name: Tests
permissions:
contents: read
runs-on: ${{ matrix.os }}
strategy:
matrix:
2 changes: 1 addition & 1 deletion README.md
@@ -376,7 +376,7 @@ You only have to do the following steps once.

#### Configuring your machine's connection to PyPI

You have to do the following steps once per machine.
You have to do the following steps once per machine.

```console
$ uv tool install keyring
36 changes: 15 additions & 21 deletions pyproject.toml
@@ -1,7 +1,8 @@
# See https://setuptools.readthedocs.io/en/latest/build_meta.html
[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"
requires = ["uv>=0.5.10,<0.6.0"]
# The uv backend entered preview mode in https://github.com/astral-sh/uv/pull/8886/files
# with the 0.5.0 release. See also https://github.com/astral-sh/uv/issues/3957 for tracking.
build-backend = "uv"

[project]
name = "biomappings"
@@ -26,12 +27,15 @@ classifiers = [
"Framework :: Pytest",
"Framework :: tox",
"Framework :: Sphinx",
"Natural Language :: English",
"Programming Language :: Python",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
"Programming Language :: Python :: 3 :: Only",
"Typing :: Typed",
"Topic :: Scientific/Engineering :: Chemistry",
"Topic :: Scientific/Engineering :: Bio-Informatics",
]
@@ -45,10 +49,11 @@ keywords = [
"nlp",
]

# License Information. This can be any valid SPDX identifiers that can be resolved
# with URLs like https://spdx.org/licenses/MIT
# See https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#license
license = { file = "LICENSE" }
# License Information.
# See PEP-639 at https://peps.python.org/pep-0639/#add-license-files-key
license-files = [
"LICENSE",
]

requires-python = ">=3.9"
dependencies = [
@@ -64,7 +69,7 @@ dependencies = [
[project.optional-dependencies]
tests = [
"pytest",
"coverage",
"coverage[toml]",
]
docs = [
"sphinx>=8",
@@ -99,28 +104,17 @@ exports = [
]

# See https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#urls
# and also https://packaging.python.org/en/latest/specifications/well-known-project-urls/
[project.urls]
"Bug Tracker" = "https://github.com/biopragmatics/biomappings/issues"
Homepage = "https://github.com/biopragmatics/biomappings"
Repository = "https://github.com/biopragmatics/biomappings.git"
Documentation = "https://biomappings.readthedocs.io"

[tool.setuptools]
package-dir = { "" = "src" }

[tool.setuptools.packages.find]
# this implicitly sets `packages = ":find"`
where = ["src"] # list of folders that contain the packages (["."] by default)

# See https://setuptools.pypa.io/en/latest/userguide/datafiles.html
[tool.setuptools.package-data]
"*" = ["*.*"]

Funding = "https://github.com/sponsors/"

[project.scripts]
biomappings = "biomappings.cli:main"


[tool.cruft]
skip = [
"**/__init__.py",
6 changes: 3 additions & 3 deletions scripts/generate_chebi_mesh_mappings.py
@@ -26,7 +26,7 @@
continue
mesh_chebi_simple.append(mesh_chebi_pair)

print("Found %d CHEBI-MESH mappings." % len(mesh_chebi_simple))
print(f"Found {len(mesh_chebi_simple)} CHEBI-MESH mappings.")

predictions = []
n_redundant = 0
@@ -53,7 +53,7 @@
predictions.append(pred)

print(
"A total of %d mappings could be indirectly inferred from"
"INDRA ontology xrefs" % len(n_redundant)
f"A total of {n_redundant} mappings could be indirectly inferred from"
"INDRA ontology xrefs"
)
append_prediction_tuples(predictions, deduplicate=True, sort=True)
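
The change above (and the matching ones in the other `scripts/generate_*_mappings.py` files below) replaces `%`-style formatting with f-strings. A minimal sketch of the difference, using an invented value for the `n_redundant` counter that the script initializes to 0:

```python
# Illustrative sketch only; the message text mirrors the diff above.
n_redundant = 7  # in the script this counter starts at 0 and is incremented

# Old style: %-formatting. The removed line also applied len() to this counter,
# which raises TypeError on an int, so the rewrite fixes a latent bug as well.
old_message = "A total of %d mappings could be indirectly inferred from INDRA ontology xrefs" % n_redundant

# New style: an f-string interpolates the counter directly. Adjacent string
# literals concatenate with no separator, so when a message is split across
# two literals the first one needs a trailing space.
new_message = f"A total of {n_redundant} mappings could be indirectly inferred from INDRA ontology xrefs"

assert old_message == new_message
print(new_message)
```
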
2 changes: 1 addition & 1 deletion scripts/generate_cl_mesh_mappings.py
@@ -49,7 +49,7 @@
mappings[node] = mesh_id


print("Found %d CL->MESH mappings." % len(mappings))
print(f"Found {len(mappings)} CL->MESH mappings.")

predictions = []
for cl_id, mesh_id in mappings.items():
4 changes: 2 additions & 2 deletions scripts/generate_doid_mesh_mappings.py
@@ -57,7 +57,7 @@
mappings[node] = matches[0].term.id


print("Found %d DOID->MESH mappings." % len(mappings))
print(f"Found {len(mappings)} DOID->MESH mappings.")

We make sure that (i) the node is not already mappable to MESH and that
# (ii) there isn't some other node that was not already mapped to the
@@ -73,7 +73,7 @@
cnt = Counter(mappings.values())
mappings = {k: v for k, v in mappings.items() if cnt[v] == 1}

print("Found %d filtered DOID->MESH mappings." % len(mappings))
print(f"Found {len(mappings)} filtered DOID->MESH mappings.")

# We can now add the predictions
predictions = []
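
The `Counter`-based step retained in this hunk reduces the matches to one-to-one mappings: any MeSH term hit by more than one DOID term is discarded. A short, self-contained sketch of that filter with invented identifiers:

```python
# Illustrative sketch of the uniqueness filter used in these scripts;
# the identifiers below are made up for the example.
from collections import Counter

mappings = {
    "DOID:0000001": "D000001",
    "DOID:0000002": "D000002",
    "DOID:0000003": "D000002",  # two source terms hit the same MeSH term
}

# Count how many source terms point at each MeSH term, then keep only the
# MeSH terms that are hit exactly once, so the result stays one-to-one.
cnt = Counter(mappings.values())
mappings = {k: v for k, v in mappings.items() if cnt[v] == 1}

print(mappings)  # {'DOID:0000001': 'D000001'}
```
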
4 changes: 2 additions & 2 deletions scripts/generate_hp_mesh_mappings.py
@@ -66,7 +66,7 @@
if grounding[0] == "MESH":
mappings[node] = matches[0].term.id

print("Found %d HP->MESH mappings." % len(mappings))
print(f"Found {len(mappings)} HP->MESH mappings.")

We make sure that (i) the node is not already mappable to MESH and that
# (ii) there isn't some other node that was not already mapped to the
@@ -82,7 +82,7 @@
cnt = Counter(mappings.values())
mappings = {k: v for k, v in mappings.items() if cnt[v] == 1}

print("Found %d filtered HP->MESH mappings." % len(mappings))
print(f"Found {len(mappings)} filtered HP->MESH mappings.")

# We can now add the predictions
predictions = []
4 changes: 2 additions & 2 deletions scripts/generate_mondo_mesh_mappings.py
@@ -42,7 +42,7 @@
mappings[node] = matches[0].term.id


print("Found %d MONDO->MESH mappings." % len(mappings))
print(f"Found {len(mappings)} MONDO->MESH mappings.")

mappings = {
k: v
@@ -54,7 +54,7 @@

mappings = {k: v for k, v in mappings.items() if cnt[v] == 1}

print("Found %d MONDO->MESH mappings." % len(mappings))
print(f"Found {len(mappings)} MONDO->MESH mappings.")

predictions = []
for mondo_id, mesh_id in mappings.items():
2 changes: 1 addition & 1 deletion scripts/generate_uberon_mesh_mappings.py
@@ -19,7 +19,7 @@
if matches and matches[0].term.db == "MESH":
mappings[node] = matches[0].term.id

print("Found %d UBERON->MESH mappings." % len(mappings))
print(f"Found {len(mappings)} UBERON->MESH mappings.")

predictions = []
for uberon_id, mesh_id in mappings.items():
2 changes: 1 addition & 1 deletion src/biomappings/gilda_utils.py
@@ -16,10 +16,10 @@

__all__ = [
"append_gilda_predictions",
"iter_prediction_tuples",
"filter_custom",
"filter_existing_xrefs",
"has_mapping",
"iter_prediction_tuples",
]

logger = logging.getLogger(__name__)
4 changes: 2 additions & 2 deletions src/biomappings/paper_analysis.py
@@ -18,9 +18,9 @@

__all__ = [
"Result",
"get_primary_mappings",
"get_obo_mappings",
"get_non_obo_mappings",
"get_obo_mappings",
"get_primary_mappings",
"index_mappings",
]

34 changes: 17 additions & 17 deletions src/biomappings/resources/__init__.py
@@ -15,31 +15,31 @@
from biomappings.utils import OVERRIDE_MIRIAM, RESOURCE_PATH, get_canonical_tuple

__all__ = [
"MappingTuple",
"MAPPINGS_HEADER",
"PredictionTuple",
"PREDICTIONS_HEADER",
"MappingTuple",
"Mappings",
"load_mappings",
"load_mappings_subset",
"append_true_mappings",
"append_true_mapping_tuples",
"write_true_mappings",
"load_false_mappings",
"PredictionTuple",
"append_false_mappings",
"write_false_mappings",
"load_unsure",
"append_unsure_mappings",
"write_unsure_mappings",
"load_predictions",
"append_predictions",
"append_prediction_tuples",
"write_predictions",
"remove_mappings",
"load_curators",
"append_predictions",
"append_true_mapping_tuples",
"append_true_mappings",
"append_unsure_mappings",
"filter_predictions",
"get_curated_filter",
"load_curators",
"load_false_mappings",
"load_mappings",
"load_mappings_subset",
"load_predictions",
"load_unsure",
"prediction_tuples_from_semra",
"remove_mappings",
"write_false_mappings",
"write_predictions",
"write_true_mappings",
"write_unsure_mappings",
]

logger = logging.getLogger(__name__)
14 changes: 7 additions & 7 deletions src/biomappings/utils.py
@@ -10,17 +10,17 @@
import bioregistry

__all__ = [
"get_git_hash",
"get_script_url",
"get_canonical_tuple",
"UnregisteredPrefix",
"UnstandardizedPrefix",
"CMapping",
"InvalidIdentifier",
"InvalidNormIdentifier",
"InvalidIdentifierPattern",
"InvalidNormIdentifier",
"UnregisteredPrefix",
"UnstandardizedPrefix",
"check_valid_prefix_id",
"get_canonical_tuple",
"get_curie",
"CMapping",
"get_git_hash",
"get_script_url",
]

HERE = Path(__file__).parent.resolve()
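
The `__all__` edits in this file, `gilda_utils.py`, `paper_analysis.py`, and `resources/__init__.py` reorder the export lists alphabetically. A rough sketch of the idea follows; whether the project enforces a particular collation (for example through a linter rule) is an assumption, not something this diff states:

```python
# Minimal sketch: alphabetize an __all__-style list of export names using
# Python's default string ordering, in which uppercase sorts before lowercase.
# The exact collation the project uses may differ; this is only illustrative.
exports = [
    "get_git_hash",
    "get_script_url",
    "get_canonical_tuple",
    "UnregisteredPrefix",
    "CMapping",
]

exports = sorted(exports)
print(exports)
# ['CMapping', 'UnregisteredPrefix', 'get_canonical_tuple', 'get_git_hash', 'get_script_url']
```
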