From 4b38c8afcb9b67527b194b2434f977727c5f447d Mon Sep 17 00:00:00 2001 From: Ninette Adhikari Date: Thu, 20 Jun 2024 10:32:40 -0400 Subject: [PATCH 01/14] feat: Add type for conda_smithy --- conda_smithy/anaconda_token_rotation.py | 28 +- conda_smithy/ci_skeleton.py | 23 +- conda_smithy/cirun_utils.py | 5 +- conda_smithy/cli.py | 11 +- conda_smithy/configure_feedstock.py | 461 ++++++++++++++++-------- conda_smithy/feedstock_io.py | 33 +- conda_smithy/feedstock_tokens.py | 54 ++- conda_smithy/lint_recipe.py | 23 +- conda_smithy/schema.py | 50 +-- conda_smithy/utils.py | 13 +- conda_smithy/validate_schema.py | 24 +- conda_smithy/variant_algebra.py | 30 +- 12 files changed, 512 insertions(+), 243 deletions(-) diff --git a/conda_smithy/anaconda_token_rotation.py b/conda_smithy/anaconda_token_rotation.py index 749cf326b..41f5c76b3 100644 --- a/conda_smithy/anaconda_token_rotation.py +++ b/conda_smithy/anaconda_token_rotation.py @@ -12,6 +12,12 @@ import os import sys from contextlib import redirect_stderr, redirect_stdout +from github import Github +from typing import ( + List, + Optional, + Union +) import requests from github import Github @@ -32,17 +38,17 @@ def _get_anaconda_token(): def rotate_anaconda_token( - user, - project, - feedstock_config_path, - drone=True, - circle=True, - travis=True, - azure=True, - appveyor=True, - github_actions=True, - token_name="BINSTAR_TOKEN", - drone_endpoints=(), + user: str, + project: str, + feedstock_config_path: Optional[str], + drone: bool = True, + circle: bool = True, + travis: bool = True, + azure: bool = True, + appveyor: bool = True, + github_actions: bool = True, + token_name: str = "BINSTAR_TOKEN", + drone_endpoints: Union[list, tuple] = (), ): """Rotate the anaconda (binstar) token used by the CI providers diff --git a/conda_smithy/ci_skeleton.py b/conda_smithy/ci_skeleton.py index 41f4f1af7..2f4df96f7 100644 --- a/conda_smithy/ci_skeleton.py +++ b/conda_smithy/ci_skeleton.py @@ -9,10 +9,17 @@ import os import sys 
-from conda_smithy.configure_feedstock import make_jinja_env +from .configure_feedstock import make_jinja_env +from jinja2.sandbox import SandboxedEnvironment +from typing import Dict -def _render_template(template_file, env, forge_dir, config): +def _render_template( + template_file: str, + env: SandboxedEnvironment, + forge_dir: str, + config: Dict[str, str], +): """Renders the template""" template = env.get_template( os.path.basename(template_file) + ".ci-skel.tmpl" @@ -31,10 +38,10 @@ def _render_template(template_file, env, forge_dir, config): def _insert_into_gitignore( - feedstock_directory=".", - prefix="# conda smithy ci-skeleton start\n", - suffix="# conda smithy ci-skeleton end\n", -): + feedstock_directory: str = ".", + prefix: str = "# conda smithy ci-skeleton start\n", + suffix: str = "# conda smithy ci-skeleton end\n", +) -> str: """Places gitignore contents into gitignore.""" # get current contents fname = os.path.join(feedstock_directory, ".gitignore") @@ -57,7 +64,9 @@ def _insert_into_gitignore( def generate( - package_name="pkg", feedstock_directory=".", recipe_directory="recipe" + package_name: str = "pkg", + feedstock_directory: str = ".", + recipe_directory: str = "recipe", ): """Generates the CI skeleton.""" forge_dir = os.path.abspath(feedstock_directory) diff --git a/conda_smithy/cirun_utils.py b/conda_smithy/cirun_utils.py index 27b6192c4..918dc6261 100644 --- a/conda_smithy/cirun_utils.py +++ b/conda_smithy/cirun_utils.py @@ -19,6 +19,7 @@ def get_cirun_installation_id(owner: str) -> int: else: gh = Github(gh_token()) user = gh.get_user() + user_or_org: Any if user.login == owner: user_or_org = user else: @@ -42,8 +43,8 @@ def add_repo_to_cirun_resource( owner: str, repo: str, resources: List[str], - teams: List, - roles: List, + teams: Optional[List], + roles: Optional[List], users_from_json: Optional[str] = None, cirun_policy_args: Optional[List[str]] = None, ) -> Dict[str, Any]: diff --git a/conda_smithy/cli.py b/conda_smithy/cli.py 
index 537fb857d..95d802aa3 100644 --- a/conda_smithy/cli.py +++ b/conda_smithy/cli.py @@ -13,6 +13,7 @@ from rattler_build_conda_compat.render import MetaData as RattlerMetaData from rattler_build_conda_compat.utils import has_recipe as has_recipe_v1 from ruamel.yaml import YAML +from typing import Optional, List import conda_smithy.cirun_utils from conda_smithy import __version__, configure_feedstock, feedstock_io @@ -27,12 +28,12 @@ ) -def default_feedstock_config_path(feedstock_directory): +def default_feedstock_config_path(feedstock_directory: str) -> str: return os.path.join(feedstock_directory, "conda-forge.yml") def generate_feedstock_content( - target_directory, source_recipe_dir, conda_build_tool: Optional[str] = None + target_directory: str, source_recipe_dir: str, conda_build_tool: Optional[str] = None ): target_directory = os.path.abspath(target_directory) recipe_dir = "recipe" @@ -81,10 +82,10 @@ def generate_feedstock_content( class Subcommand: #: The name of the subcommand - subcommand = None - aliases = [] + subcommand: Optional[str] = None + aliases: List[str] = [] - def __init__(self, parser, help=None): + def __init__(self, parser, help: Optional[str] = None): subcommand_parser = parser.add_parser( self.subcommand, help=help, description=help, aliases=self.aliases ) diff --git a/conda_smithy/configure_feedstock.py b/conda_smithy/configure_feedstock.py index 8a67d5152..d4fd1a5b2 100644 --- a/conda_smithy/configure_feedstock.py +++ b/conda_smithy/configure_feedstock.py @@ -3,12 +3,13 @@ import hashlib import logging import os -import pprint import re import subprocess import sys +import pprint import textwrap import time +import yaml import warnings from collections import Counter, OrderedDict, namedtuple from copy import deepcopy @@ -16,9 +17,28 @@ from itertools import chain, product from os import fspath from pathlib import Path, PurePath - import requests -import yaml + +# Imports for typing +from conda_build.config import Config +from 
conda_build.metadata import MetaData +from jinja2.sandbox import SandboxedEnvironment +from typing import ( + Any, + Callable, + Dict, + Iterator, + List, + Optional, + Set, + Tuple, + Union, +) + +try: + from builtins import ExceptionGroup +except ImportError: + from exceptiongroup import ExceptionGroup # The `requests` lib uses `simplejson` instead of `json` when available. # In consequence the same JSON library must be used or the `JSONDecodeError` @@ -27,23 +47,26 @@ try: import simplejson as json except ImportError: - import json + import json # type: ignore + +from conda.models.match_spec import MatchSpec +from conda.models.version import VersionOrder +from conda.exceptions import InvalidVersionSpec import conda_build.api import conda_build.render import conda_build.utils import conda_build.variants -from conda.exceptions import InvalidVersionSpec +import conda_build.conda_interface +import conda_build.render from conda.models.match_spec import MatchSpec -from conda.models.version import VersionOrder -from conda_build import __version__ as conda_build_version from conda_build.metadata import get_selectors -from jinja2 import FileSystemLoader -from jinja2.sandbox import SandboxedEnvironment -from rattler_build_conda_compat.loader import parse_recipe_config_file -from rattler_build_conda_compat.render import render as rattler_render -from conda_smithy import __version__ +from copy import deepcopy + +from conda_build import __version__ as conda_build_version +from jinja2 import Environment, FileSystemLoader + from conda_smithy.feedstock_io import ( copy_file, remove_file, @@ -51,16 +74,19 @@ set_exe_file, write_file, ) +from conda_smithy.validate_schema import ( + validate_json_schema, + CONDA_FORGE_YAML_DEFAULTS_FILE, +) from conda_smithy.utils import ( - RATTLER_BUILD, - HashableDict, get_feedstock_about_from_meta, get_feedstock_name_from_meta, ) -from conda_smithy.validate_schema import ( - CONDA_FORGE_YAML_DEFAULTS_FILE, - validate_json_schema, -) + +from . 
import __version__ +from .rattler_build.build import render as rattler_render +from .rattler_build.loader import parse_recipe_config_file +from .utils import RATTLER_BUILD conda_forge_content = os.path.abspath(os.path.dirname(__file__)) @@ -98,7 +124,14 @@ def warn_once(msg: str): logger.warning(msg) -def package_key(config, used_loop_vars, subdir): +def package_key( + config: Dict[ + str, + Union[List[str], List[List[str]], OrderedDict] + ], + used_loop_vars: Set[str], + subdir: str, +) -> str: # get the build string from whatever conda-build makes of the configuration key = "".join( [ @@ -110,7 +143,7 @@ def package_key(config, used_loop_vars, subdir): return key.replace("*", "_").replace(" ", "_") -def _ignore_match(ignore, rel): +def _ignore_match(ignore: Union[Set[str], Tuple[()]], rel: str): """Return true if rel or any of it's PurePath().parents are in ignore i.e. putting .github in skip_render will prevent rendering of anything @@ -127,7 +160,12 @@ def _ignore_match(ignore, rel): return False -def copytree(src, dst, ignore=(), root_dst=None): +def copytree( + src: str, + dst: str, + ignore: Union[Set[str], Tuple[()]] = (), + root_dst: Optional[str] = None, +): """This emulates shutil.copytree, but does so with our git file tracking, so that the new files are added to the repo""" if root_dst is None: @@ -146,8 +184,10 @@ def copytree(src, dst, ignore=(), root_dst=None): copy_file(s, d) -def merge_list_of_dicts(list_of_dicts): - squished_dict = OrderedDict() +def merge_list_of_dicts( + list_of_dicts: List[Dict[str, List[str]]] +) -> OrderedDict: + squished_dict: OrderedDict = OrderedDict() for idict in list_of_dicts: for key, val in idict.items(): if key not in squished_dict: @@ -156,11 +196,15 @@ def merge_list_of_dicts(list_of_dicts): return squished_dict -def argsort(seq): +def argsort( + seq: List[tuple] +) -> List[int]: return sorted(range(len(seq)), key=seq.__getitem__) -def sort_config(config, zip_key_groups): +def sort_config( + config: OrderedDict, 
zip_key_groups: List[Union[List[str], Any]] +): groups = copy.deepcopy(zip_key_groups) for i, group in enumerate(groups): groups[i] = [pkg for pkg in group if pkg in config.keys()] @@ -199,7 +243,19 @@ def sort_config(config, zip_key_groups): config[key] = p -def break_up_top_level_values(top_level_keys, squished_variants): +def break_up_top_level_values( + top_level_keys: Set[str], squished_variants: OrderedDict +) -> List[ + Dict[ + str, + Union[ + List[str], + OrderedDict, + List[Union[str, List[str]]], + List[List[str]], + ], + ] +]: """top-level values make up CI configurations. We need to break them up into individual files.""" @@ -211,8 +267,8 @@ def break_up_top_level_values(top_level_keys, squished_variants): zip_key_groups = squished_variants["zip_keys"] if zip_key_groups and not isinstance(zip_key_groups[0], list): zip_key_groups = [zip_key_groups] - zipped_configs = [] - top_level_dimensions = [] + zipped_configs: list = [] + top_level_dimensions: list = [] for key in top_level_keys: if key in accounted_for_keys: # remove the used variables from the collection of all variables - we have them in the @@ -227,9 +283,9 @@ def break_up_top_level_values(top_level_keys, squished_variants): # config in its own file zipped_config = [] - top_level_config_dict = OrderedDict() + top_level_config_dict: dict = OrderedDict() for idx, variant_key in enumerate(squished_variants[key]): - top_level_config = [] + top_level_config: Any = [] for k in group: if k in top_level_keys: top_level_config.append( @@ -268,6 +324,7 @@ def break_up_top_level_values(top_level_keys, squished_variants): sort_config(squished_variants, zip_key_groups) for zipped_config in zipped_configs: + config: dict for config in zipped_config: sort_config(config, zip_key_groups) @@ -286,7 +343,7 @@ def break_up_top_level_values(top_level_keys, squished_variants): return configs -def _package_var_name(pkg): +def _package_var_name(pkg: str) -> str: return pkg.replace("-", "_") @@ -307,7 +364,9 @@ def 
_trim_unused_zip_keys(all_used_vars): del all_used_vars["zip_keys"] -def _trim_unused_pin_run_as_build(all_used_vars): +def _trim_unused_pin_run_as_build( + all_used_vars: dict +): """Remove unused keys in pin_run_as_build sets""" pkgs = all_used_vars.get("pin_run_as_build", {}) used_pkgs = {} @@ -322,10 +381,19 @@ def _trim_unused_pin_run_as_build(all_used_vars): def _get_used_key_values_by_input_order( - squished_input_variants, - squished_used_variants, - all_used_vars, -): + squished_input_variants: Union[ + OrderedDict, + Dict[ + str, + Union[set, dict, list, tuple], + ], + ], + squished_used_variants: Union[ + OrderedDict, + dict + ], + all_used_vars: Set[str], +) -> tuple: used_key_values = { key: squished_input_variants[key] for key in all_used_vars @@ -351,8 +419,8 @@ def _get_used_key_values_by_input_order( for keyset, tuples in zipped_tuples.items(): # for each set of zipped keys from squished_input_variants, # we trim them down to what is in squished_used_variants - used_keyset = [] - used_keyset_inds = [] + used_keyset: Any = [] + used_keyset_inds: Any = [] for k in keyset: if k in squished_used_variants: used_keyset.append(k) @@ -426,7 +494,10 @@ def _get_used_key_values_by_input_order( return used_key_values, zipped_keys -def _merge_deployment_target(container_of_dicts, has_macdt): +def _merge_deployment_target( + container_of_dicts: set, + has_macdt: bool, +) -> set: """ For a collection of variant dictionaries, merge deployment target specs. @@ -504,8 +575,12 @@ def _merge_deployment_target(container_of_dicts, has_macdt): def _collapse_subpackage_variants( - list_of_metas, root_path, platform, arch, forge_config -): + list_of_metas: List[MetaData], + root_path: str, + platform: str, + arch: str, + forge_config: Dict[str, Any], +) -> tuple: """Collapse all subpackage node variants into one aggregate collection of used variables We get one node per output, but a given recipe can have multiple outputs. 
Each output @@ -517,7 +592,7 @@ def _collapse_subpackage_variants( top_level_loop_vars = set() all_used_vars = set() - all_variants = set() + all_variants: set = set() is_noarch = True @@ -616,6 +691,7 @@ def _collapse_subpackage_variants( logger.debug("top_level_vars %s", pprint.pformat(top_level_vars)) logger.debug("top_level_loop_vars %s", pprint.pformat(top_level_loop_vars)) + used_key_values: Any used_key_values, used_zipped_vars = _get_used_key_values_by_input_order( squished_input_variants, squished_used_variants, @@ -652,7 +728,9 @@ def _collapse_subpackage_variants( ) -def _yaml_represent_ordereddict(yaml_representer, data): +def _yaml_represent_ordereddict( + yaml_representer, data: OrderedDict +): # represent_dict processes dict-likes with a .sort() method or plain iterables of key-value # pairs. Only for the latter it never sorts and retains the order of the OrderedDict. return yaml.representer.SafeRepresenter.represent_dict( @@ -660,7 +738,7 @@ def _yaml_represent_ordereddict(yaml_representer, data): ) -def _santize_remote_ci_setup(remote_ci_setup): +def _santize_remote_ci_setup(remote_ci_setup: List[str]) -> List[str]: remote_ci_setup_ = conda_build.utils.ensure_list(remote_ci_setup) remote_ci_setup = [] for package in remote_ci_setup_: @@ -672,7 +750,18 @@ def _santize_remote_ci_setup(remote_ci_setup): return remote_ci_setup -def finalize_config(config, platform, arch, forge_config): +def finalize_config( + config: Dict[ + str, + Union[list, OrderedDict], + ], + platform: str, + arch: str, + forge_config: Dict[str, Any], +) -> Dict[ + str, + Union[List[str], List[List[str]], OrderedDict], +]: """For configs without essential parameters like docker_image add fallback value. 
""" @@ -694,8 +783,13 @@ def finalize_config(config, platform, arch, forge_config): def dump_subspace_config_files( - metas, root_path, platform, arch, upload, forge_config -): + metas: List[MetaData], + root_path: str, + platform: str, + arch: str, + upload: bool, + forge_config: Dict[str, Any], +) -> List[Dict[str, Any]]: """With conda-build 3, it handles the build matrix. We take what it spits out, and write a config.yaml file for each matrix entry that it spits out. References to a specific file replace all of the old environment variables that specified a matrix entry. @@ -766,8 +860,11 @@ def dump_subspace_config_files( def _get_fast_finish_script( - provider_name, forge_config, forge_dir, fast_finish_text -): + provider_name: str, + forge_config: Dict[str, Any], + forge_dir: str, + fast_finish_text: str, +) -> str: get_fast_finish_script = "" fast_finish_script = "" tooling_branch = forge_config["github"]["tooling_branch_name"] @@ -816,7 +913,12 @@ def _get_fast_finish_script( return fast_finish_text -def migrate_combined_spec(combined_spec, forge_dir, config, forge_config): +def migrate_combined_spec( + combined_spec: Any, + forge_dir: str, + config: Config, + forge_config: Dict[str, Any], +) -> Dict[str, Union[List[str], Dict[str, Dict[str, str]], List[List[str]]]]: """CFEP-9 variant migrations Apply the list of migrations configurations to the build (in the correct sequence) @@ -856,14 +958,15 @@ def migrate_combined_spec(combined_spec, forge_dir, config, forge_config): def _conda_build_api_render_for_smithy( - recipe_path, - config=None, - variants=None, - permit_unsatisfiable_variants=True, - finalize=True, - bypass_env_check=False, + recipe_path: str, + config = None, + variants: Optional[Dict[str, Union[List[str], List[List[str]], Dict[str, Dict[str, str]]]]] + = None, + permit_unsatisfiable_variants: bool = True, + finalize: bool = True, + bypass_env_check: bool = False, **kwargs, -): +) -> List[Union[Tuple[MetaData, bool, bool], Any]]: """This 
function works just like conda_build.api.render, but it returns all of metadata objects regardless of whether they produce a unique package hash / name. @@ -933,21 +1036,21 @@ def _conda_build_api_render_for_smithy( def _render_ci_provider( - provider_name, - jinja_env, - forge_config, - forge_dir, - platforms, - archs, - fast_finish_text, - platform_target_path, - platform_template_file, - platform_specific_setup, - keep_noarchs=None, - extra_platform_files={}, - upload_packages=[], - return_metadata=False, -): + provider_name: str, + jinja_env: SandboxedEnvironment, + forge_config: Dict[str, Any], + forge_dir: str, + platforms: List[Union[Any, str]], + archs: List[Union[Any, str]], + fast_finish_text: str, + platform_target_path: str, + platform_template_file: str, + platform_specific_setup: Callable, + keep_noarchs: Optional[List[Union[Any, bool]]] = None, + extra_platform_files: Dict[str, List[str]] = {}, + upload_packages: List[Union[Any, bool]] = [], + return_metadata: bool = False, +) -> Dict[str, Any]: if keep_noarchs is None: keep_noarchs = [False] * len(platforms) @@ -1001,31 +1104,14 @@ def _render_ci_provider( os.path.join(forge_dir, forge_config["recipe_dir"]), config=config ) - # If we are using new recipe - # we also load v1 variants.yaml - if recipe_file == "recipe.yaml": - # get_selectors from conda-build return namespace - # so it is usefull to reuse it here - namespace = get_selectors(config) - variants_path = os.path.join( - forge_dir, forge_config["recipe_dir"], "variants.yaml" - ) - if os.path.exists(variants_path): - new_spec = parse_recipe_config_file(variants_path, namespace) - specs = { - "combined_spec": combined_variant_spec, - "variants.yaml": new_spec, - } - combined_variant_spec = conda_build.variants.combine_specs( - specs - ) - + migrated_combined_variant_spec: Any migrated_combined_variant_spec = migrate_combined_spec( combined_variant_spec, forge_dir, config, forge_config, ) + channel_target: Any for channel_target in 
migrated_combined_variant_spec.get( "channel_targets", [] ): @@ -1107,6 +1193,7 @@ def _render_ci_provider( if not keep_noarch: to_delete = [] + meta: Any for idx, meta in enumerate(metas): if meta.noarch: # do not build noarch, including noarch: python, packages on Travis CI. @@ -1246,7 +1333,9 @@ def _render_ci_provider( return forge_config -def _get_build_setup_line(forge_dir, platform, forge_config): +def _get_build_setup_line( + forge_dir: str, platform: str, forge_config: Dict[str, Any] +) -> str: # If the recipe supplies its own run_conda_forge_build_setup script_linux, # we use it instead of the global one. if platform == "linux": @@ -1314,7 +1403,12 @@ def _get_build_setup_line(forge_dir, platform, forge_config): return build_setup -def _circle_specific_setup(jinja_env, forge_config, forge_dir, platform): +def _circle_specific_setup( + jinja_env: SandboxedEnvironment, + forge_config: Dict[str, Any], + forge_dir: str, + platform: str, +): if platform == "linux": yum_build_setup = generate_yum_requirements(forge_config, forge_dir) if yum_build_setup: @@ -1347,7 +1441,9 @@ def _circle_specific_setup(jinja_env, forge_config, forge_dir, platform): set_exe_file(target_fname, True) -def generate_yum_requirements(forge_config, forge_dir): +def generate_yum_requirements( + forge_config: Dict[str, Any], forge_dir: str +) -> str: # If there is a "yum_requirements.txt" file in the recipe, we honour it. 
yum_requirements_fpath = os.path.join( forge_dir, forge_config["recipe_dir"], "yum_requirements.txt" @@ -1383,7 +1479,12 @@ def generate_yum_requirements(forge_config, forge_dir): return yum_build_setup -def _get_platforms_of_provider(provider, forge_config): +def _get_platforms_of_provider( + provider: str, forge_config: Dict[str, Any] +) -> Union[ + Tuple[List[str], List[str], List[bool], List[bool]], + Tuple[List[Any], List[Any], List[Any], List[Any]], +]: platforms = [] keep_noarchs = [] archs = [] @@ -1429,7 +1530,12 @@ def _get_platforms_of_provider(provider, forge_config): return platforms, archs, keep_noarchs, upload_packages -def render_circle(jinja_env, forge_config, forge_dir, return_metadata=False): +def render_circle( + jinja_env: SandboxedEnvironment, + forge_config: Dict[str, Any], + forge_dir: str, + return_metadata: bool = False, +) -> Dict[str, Any]: target_path = os.path.join(forge_dir, ".circleci", "config.yml") template_filename = "circle.yml.tmpl" fast_finish_text = textwrap.dedent( @@ -1470,7 +1576,12 @@ def render_circle(jinja_env, forge_config, forge_dir, return_metadata=False): ) -def _travis_specific_setup(jinja_env, forge_config, forge_dir, platform): +def _travis_specific_setup( + jinja_env: SandboxedEnvironment, + forge_config: Dict[str, Any], + forge_dir: str, + platform: str, +): build_setup = _get_build_setup_line(forge_dir, platform, forge_config) platform_templates = { @@ -1478,7 +1589,7 @@ def _travis_specific_setup(jinja_env, forge_config, forge_dir, platform): "osx": [".scripts/run_osx_build.sh"], "win": [], } - template_files = platform_templates.get(platform, []) + template_files: Any = platform_templates.get(platform, []) if platform == "linux": yum_build_setup = generate_yum_requirements(forge_config, forge_dir) @@ -1496,7 +1607,10 @@ def _travis_specific_setup(jinja_env, forge_config, forge_dir, platform): def _render_template_exe_files( - forge_config, jinja_env, template_files, forge_dir + forge_config: Dict[str, Any], + 
jinja_env: SandboxedEnvironment, + template_files: List[str], + forge_dir: str, ): for template_file in template_files: template = jinja_env.get_template( @@ -1532,7 +1646,12 @@ def _render_template_exe_files( set_exe_file(target_fname, True) -def render_travis(jinja_env, forge_config, forge_dir, return_metadata=False): +def render_travis( + jinja_env: SandboxedEnvironment, + forge_config: Dict[str, Any], + forge_dir: str, + return_metadata: bool = False, +) -> Dict[str, Any]: target_path = os.path.join(forge_dir, ".travis.yml") template_filename = "travis.yml.tmpl" fast_finish_text = "" @@ -1561,7 +1680,12 @@ def render_travis(jinja_env, forge_config, forge_dir, return_metadata=False): ) -def _appveyor_specific_setup(jinja_env, forge_config, forge_dir, platform): +def _appveyor_specific_setup( + jinja_env: SandboxedEnvironment, + forge_config: Dict[str, Any], + forge_dir: str, + platform: str, +): build_setup = _get_build_setup_line(forge_dir, platform, forge_config) build_setup = build_setup.rstrip() new_build_setup = "" @@ -1575,7 +1699,12 @@ def _appveyor_specific_setup(jinja_env, forge_config, forge_dir, platform): forge_config["build_setup"] = build_setup -def render_appveyor(jinja_env, forge_config, forge_dir, return_metadata=False): +def render_appveyor( + jinja_env: SandboxedEnvironment, + forge_config: Dict[str, Any], + forge_dir: str, + return_metadata: bool = False, +) -> Dict[str, Any]: target_path = os.path.join(forge_dir, ".appveyor.yml") fast_finish_text = textwrap.dedent( """\ @@ -1610,7 +1739,10 @@ def render_appveyor(jinja_env, forge_config, forge_dir, return_metadata=False): def _github_actions_specific_setup( - jinja_env, forge_config, forge_dir, platform + jinja_env: SandboxedEnvironment, + forge_config: Dict[str, Any], + forge_dir: str, + platform: str, ): # Handle GH-hosted and self-hosted runners runs-on config # Do it before the deepcopy below so these changes can be used by the @@ -1733,8 +1865,11 @@ def _github_actions_specific_setup( 
def render_github_actions( - jinja_env, forge_config, forge_dir, return_metadata=False -): + jinja_env: SandboxedEnvironment, + forge_config: Dict[str, Any], + forge_dir: str, + return_metadata: bool = False, +) -> Dict[str, Any]: target_path = os.path.join( forge_dir, ".github", "workflows", "conda-build.yml" ) @@ -1768,7 +1903,12 @@ def render_github_actions( ) -def _azure_specific_setup(jinja_env, forge_config, forge_dir, platform): +def _azure_specific_setup( + jinja_env: SandboxedEnvironment, + forge_config: Dict[str, Any], + forge_dir: str, + platform: str, +): build_setup = _get_build_setup_line(forge_dir, platform, forge_config) if platform == "linux": @@ -1853,7 +1993,12 @@ def _azure_specific_setup(jinja_env, forge_config, forge_dir, platform): ) -def render_azure(jinja_env, forge_config, forge_dir, return_metadata=False): +def render_azure( + jinja_env: SandboxedEnvironment, + forge_config: Dict[str, Any], + forge_dir: str, + return_metadata: bool = False, +) -> Dict[str, Any]: target_path = os.path.join(forge_dir, "azure-pipelines.yml") template_filename = "azure-pipelines.yml.tmpl" fast_finish_text = "" @@ -1885,13 +2030,18 @@ def render_azure(jinja_env, forge_config, forge_dir, return_metadata=False): ) -def _drone_specific_setup(jinja_env, forge_config, forge_dir, platform): +def _drone_specific_setup( + jinja_env: SandboxedEnvironment, + forge_config: Dict[str, Any], + forge_dir: str, + platform: str, +): platform_templates = { "linux": [".scripts/build_steps.sh"], "osx": [], "win": [], } - template_files = platform_templates.get(platform, []) + template_files: Any = platform_templates.get(platform, []) build_setup = _get_build_setup_line(forge_dir, platform, forge_config) @@ -1910,7 +2060,12 @@ def _drone_specific_setup(jinja_env, forge_config, forge_dir, platform): ) -def render_drone(jinja_env, forge_config, forge_dir, return_metadata=False): +def render_drone( + jinja_env: SandboxedEnvironment, + forge_config: Dict[str, Any], + forge_dir: str, + 
return_metadata: bool = False, +) -> Dict[str, Any]: target_path = os.path.join(forge_dir, ".drone.yml") template_filename = "drone.yml.tmpl" fast_finish_text = "" @@ -1943,8 +2098,11 @@ def render_drone(jinja_env, forge_config, forge_dir, return_metadata=False): def render_woodpecker( - jinja_env, forge_config, forge_dir, return_metadata=False -): + jinja_env: SandboxedEnvironment, + forge_config: Dict[str, Any], + forge_dir: str, + return_metadata: bool = False, +) -> dict: target_path = os.path.join(forge_dir, ".woodpecker.yml") template_filename = "woodpecker.yml.tmpl" fast_finish_text = "" @@ -1989,7 +2147,7 @@ def azure_build_id_from_token(forge_config): forge_config["azure"]["build_id"] = build_info["build_id"] -def azure_build_id_from_public(forge_config): +def azure_build_id_from_public(forge_config: Dict[str, Any]): """Retrieve Azure `build_id` from a `forge_config`. This function only works when the Azure org is public. """ @@ -2005,15 +2163,12 @@ def azure_build_id_from_public(forge_config): forge_config["azure"]["build_id"] = build_def["id"] -def get_maintainer_url(user_or_team): - if "/" in user_or_team: - org, team_name = user_or_team.split("/") - return f"https://github.com/orgs/{org}/teams/{team_name}/" - else: - return f"https://github.com/{user_or_team}/" - - -def render_readme(jinja_env, forge_config, forge_dir, render_info=None): +def render_README( + jinja_env: SandboxedEnvironment, + forge_config: Dict[str, Any], + forge_dir: str, + render_info: Optional[list] = None, +): if "README.md" in forge_config["skip_render"]: logger.info("README.md rendering is skipped") return @@ -2149,14 +2304,18 @@ def render_readme(jinja_env, forge_config, forge_dir, render_info=None): remove_file_or_dir(code_owners_file) -def _get_skip_files(forge_config): +def _get_skip_files(forge_config: Dict[str, Any]) -> Set[str]: skip_files = {"README", "__pycache__"} for f in forge_config["skip_render"]: skip_files.add(f) return skip_files -def 
render_github_actions_services(jinja_env, forge_config, forge_dir): +def render_github_actions_services( + jinja_env: SandboxedEnvironment, + forge_config: Dict[str, Any], + forge_dir: str, +): # render github actions files for automerge and rerendering services skip_files = _get_skip_files(forge_config) for template_file in ["automerge.yml", "webservices.yml"]: @@ -2170,7 +2329,7 @@ def render_github_actions_services(jinja_env, forge_config, forge_dir): fh.write(new_file_contents) -def copy_feedstock_content(forge_config, forge_dir): +def copy_feedstock_content(forge_config: Dict[str, Any], forge_dir: str): feedstock_content = os.path.join(conda_forge_content, "feedstock_content") skip_files = _get_skip_files(forge_config) copytree(feedstock_content, forge_dir, skip_files) @@ -2188,7 +2347,9 @@ def _update_dict_within_dict(items, config): return config -def _read_forge_config(forge_dir, forge_yml=None): +def _read_forge_config( + forge_dir: str, forge_yml: Optional[str] = None +) -> Dict[str, Any]: # Load default values from the conda-forge.yml file with open(CONDA_FORGE_YAML_DEFAULTS_FILE) as fh: default_config = yaml.safe_load(fh.read()) @@ -2207,10 +2368,10 @@ def _read_forge_config(forge_dir, forge_yml=None): with open(forge_yml) as fh: documents = list(yaml.safe_load_all(fh)) - file_config = (documents or [None])[0] or {} + file_config: Any = (documents or [None])[0] or {} # Validate loaded configuration against a JSON schema. 
- validate_lints, validate_hints = validate_json_schema(file_config) + validate_lints, validate_hints = validate_json_schema(file_config, None) for err in chain(validate_lints, validate_hints): logger.warning( "%s: %s = %s -> %s", @@ -2265,7 +2426,9 @@ def _read_forge_config(forge_dir, forge_yml=None): return config -def _legacy_compatibility_checks(config: dict, forge_dir): +def _legacy_compatibility_checks( + config: dict, forge_dir: str +) -> Dict[str, Any]: # An older conda-smithy used to have some files which should no longer exist, # remove those now. old_files = [ @@ -2299,7 +2462,9 @@ def _legacy_compatibility_checks(config: dict, forge_dir): return config -def _load_forge_config(forge_dir, exclusive_config_file, forge_yml=None): +def _load_forge_config( + forge_dir: str, exclusive_config_file: str, forge_yml: Optional[str] = None +) -> Dict[str, Any]: config = _read_forge_config(forge_dir, forge_yml=forge_yml) for plat in ["linux", "osx", "win"]: @@ -2452,7 +2617,13 @@ def check_version_uptodate(name, installed_version, error_on_warn): logger.info(msg) -def commit_changes(forge_file_directory, commit, cs_ver, cfp_ver, cb_ver): +def commit_changes( + forge_file_directory: str, + commit: bool, + cs_ver: str, + cfp_ver: None, + cb_ver: str, +): if cfp_ver: msg = f"Re-rendered with conda-build {cb_ver}, conda-smithy {cs_ver}, and conda-forge-pinning {cfp_ver}" else: @@ -2573,7 +2744,7 @@ def get_cached_cfp_file_path(temporary_directory): return get_cfp_file_path(temporary_directory) -def clear_variants(forge_dir): +def clear_variants(forge_dir: str): "Remove all variant files placed in the .ci_support path" if os.path.isdir(os.path.join(forge_dir, ".ci_support")): configs = glob.glob(os.path.join(forge_dir, ".ci_support", "*.yaml")) @@ -2581,7 +2752,7 @@ def clear_variants(forge_dir): remove_file(config) -def get_common_scripts(forge_dir): +def get_common_scripts(forge_dir: str) -> Iterator[str]: for old_file in [ "run_docker_build.sh", "build_steps.sh", @@ 
-2592,7 +2763,7 @@ def get_common_scripts(forge_dir): yield os.path.join(forge_dir, ".scripts", old_file) -def clear_scripts(forge_dir): +def clear_scripts(forge_dir: str): for folder in [ ".azure-pipelines", ".circleci", @@ -2611,7 +2782,7 @@ def clear_scripts(forge_dir): remove_file(os.path.join(forge_dir, folder, old_file)) -def make_jinja_env(feedstock_directory): +def make_jinja_env(feedstock_directory: str) -> SandboxedEnvironment: """Creates a Jinja environment usable for rendering templates""" forge_dir = os.path.abspath(feedstock_directory) tmplt_dir = os.path.join(conda_forge_content, "templates") @@ -2625,7 +2796,9 @@ def make_jinja_env(feedstock_directory): return env -def get_migrations_in_dir(migrations_root): +def get_migrations_in_dir( + migrations_root: str, +) -> Dict[str, Tuple[str, int, bool]]: """ Given a directory, return the migrations as a mapping from the timestamp to a tuple of (filename, migration_number) @@ -2652,7 +2825,7 @@ def get_migrations_in_dir(migrations_root): return res -def set_migration_fns(forge_dir, forge_config): +def set_migration_fns(forge_dir: str, forge_config: Dict[str, Any]): """ This will calculate the migration files and set migration_fns in the forge_config as a list. 
@@ -2716,13 +2889,13 @@ def set_migration_fns(forge_dir, forge_config): def main( - forge_file_directory, - forge_yml=None, - no_check_uptodate=False, - commit=False, - exclusive_config_file=None, - check=False, - temporary_directory=None, + forge_file_directory: str, + forge_yml: Optional[str] = None, + no_check_uptodate: bool = False, + commit: bool = False, + exclusive_config_file: Optional[str] = None, + check: bool = False, + temporary_directory: Optional[str] = None, ): loglevel = os.environ.get("CONDA_SMITHY_LOGLEVEL", "INFO").upper() logger.setLevel(loglevel) diff --git a/conda_smithy/feedstock_io.py b/conda_smithy/feedstock_io.py index 473c894c0..f0297a4af 100644 --- a/conda_smithy/feedstock_io.py +++ b/conda_smithy/feedstock_io.py @@ -1,10 +1,19 @@ +from contextlib import contextmanager +import io import os import shutil import stat -from contextlib import contextmanager - - -def get_repo(path, search_parent_directories=True): +from io import TextIOWrapper +from typing import ( + Iterator, + Optional, + Any, + Union +) + +def get_repo( + path: str, search_parent_directories: bool = True +): repo = None try: import git @@ -20,19 +29,19 @@ def get_repo(path, search_parent_directories=True): return repo -def get_repo_root(path): +def get_repo_root(path: str) -> Optional[str]: try: return get_repo(path).working_tree_dir except AttributeError: return None -def set_exe_file(filename, set_exe=True): +def set_exe_file(filename: str, set_exe: bool = True): all_execute_permissions = stat.S_IXOTH | stat.S_IXGRP | stat.S_IXUSR repo = get_repo(filename) if repo: - mode = "+x" if set_exe else "-x" + mode: Union[str, int] = "+x" if set_exe else "-x" repo.git.execute(["git", "update-index", f"--chmod={mode}", filename]) mode = os.stat(filename).st_mode @@ -44,7 +53,7 @@ def set_exe_file(filename, set_exe=True): @contextmanager -def write_file(filename): +def write_file(filename: str) -> Iterator[TextIOWrapper]: dirname = os.path.dirname(filename) if dirname and not 
os.path.exists(dirname): os.makedirs(dirname) @@ -57,12 +66,12 @@ def write_file(filename): repo.index.add([filename]) -def touch_file(filename): +def touch_file(filename: str): with write_file(filename) as fh: fh.write("") -def remove_file_or_dir(filename): +def remove_file_or_dir(filename: str) -> None: if not os.path.isdir(filename): return remove_file(filename) @@ -72,7 +81,7 @@ def remove_file_or_dir(filename): shutil.rmtree(filename) -def remove_file(filename): +def remove_file(filename: str): touch_file(filename) repo = get_repo(filename) @@ -86,7 +95,7 @@ def remove_file(filename): os.removedirs(dirname) -def copy_file(src, dst): +def copy_file(src: str, dst: str): """ Tried to copy utf-8 text files line-by-line to avoid getting CRLF characters added on Windows. diff --git a/conda_smithy/feedstock_tokens.py b/conda_smithy/feedstock_tokens.py index 1bb189cef..bb01a0f5e 100644 --- a/conda_smithy/feedstock_tokens.py +++ b/conda_smithy/feedstock_tokens.py @@ -34,6 +34,12 @@ import git import requests import scrypt +from typing import ( + Iterator, + Optional, + Tuple, + Union, +) class FeedstockTokenError(Exception): @@ -41,7 +47,7 @@ class FeedstockTokenError(Exception): @contextmanager -def _secure_io(): +def _secure_io() -> Iterator[None]: """context manager that redirects stdout and stderr to /dev/null to avoid spilling tokens""" @@ -54,7 +60,9 @@ def _secure_io(): yield -def feedstock_token_local_path(user, project, provider=None): +def feedstock_token_local_path( + user: str, project: str, provider: Optional[str] = None +) -> str: """Return the path locally where the feedstock token is stored. 
""" @@ -73,7 +81,9 @@ def feedstock_token_local_path(user, project, provider=None): return os.path.expanduser(pth) -def generate_and_write_feedstock_token(user, project, provider=None): +def generate_and_write_feedstock_token( + user: str, project: str, provider: Optional[str] = None +) -> bool: """Generate a feedstock token and write it to the file given by ``feedstock_token_local_path(user, project, provider=provider)``. @@ -122,7 +132,9 @@ def generate_and_write_feedstock_token(user, project, provider=None): return failed -def read_feedstock_token(user, project, provider=None): +def read_feedstock_token( + user: str, project: str, provider: Optional[str] = None +) -> Union[Tuple[Optional[str], Optional[str]]]: """Read the feedstock token from the path given by ``feedstock_token_local_path(user, project, provider=provider)``. @@ -149,7 +161,9 @@ def read_feedstock_token(user, project, provider=None): return feedstock_token, err_msg -def feedstock_token_exists(user, project, token_repo, provider=None): +def feedstock_token_exists( + user: str, project: str, token_repo: str, provider: Optional[str] = None +) -> bool: """Test if the feedstock token exists for the given repo. All exceptions are swallowed and stdout/stderr from this function is @@ -220,8 +234,12 @@ def feedstock_token_exists(user, project, token_repo, provider=None): def is_valid_feedstock_token( - user, project, feedstock_token, token_repo, provider=None -): + user: str, + project: str, + feedstock_token: str, + token_repo: str, + provider: Optional[str] = None, +) -> bool: """Test if the input feedstock_token is valid. All exceptions are swallowed and stdout/stderr from this function is @@ -303,7 +321,9 @@ def is_valid_feedstock_token( return valid -def register_feedstock_token(user, project, token_repo, provider=None): +def register_feedstock_token( + user: str, project: str, token_repo: str, provider: Optional[str] = None +) -> bool: """Register the feedstock token with the token repo. 
This function uses a random salt and scrypt to hash the feedstock @@ -406,17 +426,17 @@ def register_feedstock_token(user, project, token_repo, provider=None): def register_feedstock_token_with_providers( - user, - project, + user: str, + project: str, *, - drone=True, - circle=True, - travis=True, - azure=True, - github_actions=True, - clobber=True, + drone: bool = True, + circle: bool = True, + travis: bool = True, + azure: bool = True, + github_actions: bool = True, + clobber: bool = True, drone_endpoints=(), - unique_token_per_provider=False, + unique_token_per_provider: bool = False, ): """Register the feedstock token with provider CI services. diff --git a/conda_smithy/lint_recipe.py b/conda_smithy/lint_recipe.py index 825ba192d..bea2ca1d9 100644 --- a/conda_smithy/lint_recipe.py +++ b/conda_smithy/lint_recipe.py @@ -7,7 +7,17 @@ from inspect import cleandoc from pathlib import Path from textwrap import indent -from typing import Any, List, Optional, Tuple +from io import TextIOWrapper +from typing import ( + Any, + Dict, + Iterator, + List, + Optional, + Set, + Tuple, + Union, +) import github import jsonschema @@ -93,7 +103,7 @@ def lintify_forge_yaml(recipe_dir: Optional[str] = None) -> (list, list): forge_yaml = {} # This is where we validate against the jsonschema and execute our custom validators. 
- return validate_json_schema(forge_yaml) + return validate_json_schema(forge_yaml, None) def lintify_meta_yaml( @@ -639,8 +649,13 @@ def _format_validation_msg(error: jsonschema.ValidationError): def main( - recipe_dir, conda_forge=False, return_hints=False, feedstock_dir=None -): + recipe_dir: str, conda_forge: bool = False, return_hints: bool = False, feedstock_dir=None +) -> Union[ + Tuple[List[str], List[str]], + Tuple[List[str], List[Any]], + List[str], + Tuple[List[Any], List[str]], +]: recipe_dir = os.path.abspath(recipe_dir) build_tool = CONDA_BUILD_TOOL if feedstock_dir: diff --git a/conda_smithy/schema.py b/conda_smithy/schema.py index 2bd56947b..a3d8472be 100644 --- a/conda_smithy/schema.py +++ b/conda_smithy/schema.py @@ -99,7 +99,7 @@ class BotConfigVersionUpdatesSourcesChoice(StrEnum): class AzureRunnerSettings(BaseModel): """This is the settings for runners.""" - model_config: ConfigDict = ConfigDict(extra="allow") + model_config = ConfigDict(extra="allow") pool: Optional[Dict[str, str]] = Field( default_factory=lambda: {"vmImage": "ubuntu-latest"}, @@ -135,7 +135,7 @@ class AzureConfig(BaseModel): https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/?view=azure-pipelines). 
""" - model_config: ConfigDict = ConfigDict(extra="forbid") + model_config = ConfigDict(extra="forbid") force: Optional[bool] = Field( default=False, @@ -236,7 +236,7 @@ class AzureConfig(BaseModel): class GithubConfig(BaseModel): - model_config: ConfigDict = ConfigDict(extra="forbid") + model_config = ConfigDict(extra="forbid") user_or_org: Optional[str] = Field( description="The name of the GitHub user or organization", @@ -258,7 +258,7 @@ class GithubConfig(BaseModel): class GithubActionsConfig(BaseModel): - model_config: ConfigDict = ConfigDict(extra="forbid") + model_config = ConfigDict(extra="forbid") artifact_retention_days: Optional[int] = Field( description="The number of days to retain artifacts", @@ -323,7 +323,7 @@ class BotConfigVersionUpdates(BaseModel): updates """ - model_config: ConfigDict = ConfigDict(extra="forbid") + model_config = ConfigDict(extra="forbid") random_fraction_to_keep: Optional[float] = Field( None, @@ -382,7 +382,7 @@ class BotConfig(BaseModel): automatic version updates/migrations for feedstocks. 
""" - model_config: ConfigDict = ConfigDict(extra="forbid") + model_config = ConfigDict(extra="forbid") automerge: Optional[Union[bool, BotConfigAutoMergeChoice]] = Field( False, @@ -417,7 +417,7 @@ class BotConfig(BaseModel): class CondaBuildConfig(BaseModel): - model_config: ConfigDict = ConfigDict(extra="allow") + model_config = ConfigDict(extra="allow") pkg_format: Optional[Literal["tar", 1, 2, "1", "2"]] = Field( description="The package version format for conda build.", @@ -447,7 +447,7 @@ class CondaBuildConfig(BaseModel): class CondaForgeDocker(BaseModel): - model_config: ConfigDict = ConfigDict(extra="forbid") + model_config = ConfigDict(extra="forbid") executable: Optional[str] = Field( description="The executable for Docker", default="docker" @@ -473,7 +473,7 @@ class CondaForgeDocker(BaseModel): class ShellCheck(BaseModel): - model_config: ConfigDict = ConfigDict(extra="forbid") + model_config = ConfigDict(extra="forbid") enabled: bool = Field( description="Whether to use shellcheck to lint shell scripts", @@ -487,7 +487,7 @@ class PlatformsAliases(StrEnum): osx = "osx" -def get_subdirs(): +def get_subdirs() -> List[str]: return [ subdir.replace("-", "_") for subdir in KNOWN_SUBDIRS if "-" in subdir ] @@ -507,29 +507,28 @@ class DefaultTestPlatforms(StrEnum): native = "native" native_and_emulated = "native_and_emulated" - -BuildPlatform = create_model( - "build_platform", - **{ +buildPlatform_fields: Dict[str, Any] = { platform.value: (Optional[Platforms], Field(default=platform.value)) for platform in Platforms - }, + } +BuildPlatform = create_model( + "build_platform", + **buildPlatform_fields, ) -OSVersion = create_model( - "os_version", - **{ +OSVersion_fields: Dict[str, Any] = { platform.value: (Optional[Union[str, Nullable]], Field(default=None)) for platform in Platforms if platform.value.startswith("linux") - }, + } +OSVersion = create_model( + "os_version", + **OSVersion_fields, ) ProviderType = Union[List[CIservices], CIservices, bool, 
Nullable] -Provider = create_model( - "provider", - **dict( +provider_fields: Dict[str, Any] = dict( [ (str(plat), (Optional[ProviderType], Field(default=None))) for plat in list(PlatformsAliases) + list(Platforms) @@ -538,7 +537,10 @@ class DefaultTestPlatforms(StrEnum): (str(plat), (Optional[ProviderType], Field(default="azure"))) for plat in ("linux_64", "osx_64", "win_64") ] - ), + ) +Provider = create_model( + "provider", + **provider_fields, ) @@ -551,7 +553,7 @@ class ConfigModel(BaseModel): flagged as Deprecated as appropriate. """ - model_config: ConfigDict = ConfigDict(extra="forbid") + model_config = ConfigDict(extra="forbid") # Values which are not expected to be present in the model dump, are # flagged with exclude=True. This is to avoid confusion when comparing diff --git a/conda_smithy/utils.py b/conda_smithy/utils.py index 229cc672b..6648ebe05 100644 --- a/conda_smithy/utils.py +++ b/conda_smithy/utils.py @@ -8,10 +8,13 @@ from contextlib import contextmanager from pathlib import Path from typing import Any, Dict, Union +from conda_build.metadata import MetaData import jinja2 import jinja2.sandbox import ruamel.yaml +from ruamel.yaml.comments import CommentedMap +from ruamel.yaml.main import YAML from conda_build.api import render as conda_build_render from conda_build.render import MetaData from rattler_build_conda_compat.render import MetaData as RattlerBuildMetaData @@ -56,7 +59,7 @@ def get_feedstock_name_from_meta( return meta.name() -def get_feedstock_about_from_meta(meta) -> dict: +def get_feedstock_about_from_meta(meta: MetaData) -> dict: """Fetch the feedtstock about from the parsed meta.yaml.""" # it turns out that conda_build would not preserve the feedstock about: # - if a subpackage does not have about, it uses the feedstock's @@ -110,7 +113,7 @@ def __init__(self): self.sep = "/" -def stub_compatible_pin(*args, **kwargs): +def stub_compatible_pin(*args, **kwargs) -> str: return f"compatible_pin {args[0]}" @@ -118,7 +121,7 @@ def 
stub_subpackage_pin(*args, **kwargs): return f"subpackage_pin {args[0]}" -def render_meta_yaml(text): +def render_meta_yaml(text: str) -> str: env = jinja2.sandbox.SandboxedEnvironment(undefined=NullUndefined) # stub out cb3 jinja2 functions - they are not important for linting @@ -172,7 +175,9 @@ def update_conda_forge_config(forge_yaml): get_yaml().dump(code, Path(forge_yaml)) -def merge_dict(src, dest): +def merge_dict( + src: Dict[Any, Any], dest: CommentedMap +) -> Union[CommentedMap, Dict[str, bool]]: """Recursive merge dictionary""" for key, value in src.items(): if isinstance(value, dict): diff --git a/conda_smithy/validate_schema.py b/conda_smithy/validate_schema.py index 555de599a..f6bc5e041 100644 --- a/conda_smithy/validate_schema.py +++ b/conda_smithy/validate_schema.py @@ -1,9 +1,15 @@ import json from pathlib import Path -from typing import List, Tuple - +from typing import ( + Tuple, + List, + Dict, + Optional, + Union, +) from jsonschema import Draft202012Validator, validators from jsonschema.exceptions import ValidationError +from ruamel.yaml.comments import CommentedMap CONDA_FORGE_YAML_DEFAULTS_FILE = ( Path(__file__).resolve().parent / "data" / "conda-forge.yml" @@ -39,8 +45,18 @@ def get_validator_class(): def validate_json_schema( - config, schema_file: str = None -) -> Tuple[List[ValidationError], List[ValidationError]]: + config: Union[ + CommentedMap, + Dict[str, Union[ + bool, + str, + List[str], + Dict[str, bool], + Dict[str, Dict[str, str]] + ]] + ], + schema_file: Optional[Path], +) -> tuple: # Validate the merged configuration against a JSON schema if not schema_file: schema_file = CONDA_FORGE_YAML_SCHEMA_FILE diff --git a/conda_smithy/variant_algebra.py b/conda_smithy/variant_algebra.py index aef4d0049..50a67a8d0 100644 --- a/conda_smithy/variant_algebra.py +++ b/conda_smithy/variant_algebra.py @@ -59,7 +59,7 @@ def parse_variant( def _version_order( - v: Union[str, float], ordering: Optional[List[str]] = None + v, ordering: 
Optional[List[str]] = None ) -> Union[int, VersionOrder, float]: if ordering is not None: return ordering.index(v) @@ -77,7 +77,7 @@ def variant_key_add( v_left: Union[List[str], List[float]], v_right: Union[List[str], List[float]], ordering: Optional[List[str]] = None, -) -> Union[List[str], List[float]]: +) -> List[Union[str,float]]: """Version summation adder. This takes the higher version of the two things. @@ -108,13 +108,23 @@ def variant_key_set_merge(k, v_left, v_right, ordering=None): return sorted(out_v, key=partial(_version_order, ordering=ordering)) -def variant_key_set_union(k, v_left, v_right, ordering=None): +def variant_key_set_union( + k: None, + v_left: List[Union[Any, str]], + v_right: List[Union[Any, str]], + ordering: Optional[List[str]] = None, +) -> List[str]: """Merges two sets in order, preserving all keys""" out_v = set(v_left) | set(v_right) return sorted(out_v, key=partial(_version_order, ordering=ordering)) -def op_variant_key_add(v1: dict, v2: dict): +def op_variant_key_add(v1: dict, v2: dict) -> Dict[ + str, + Union[ + float, List[str], List[List[str]], List[Union[List[str], str, float]] + ], +]: """Operator for performing a key-add key-add is additive so you will end up with more entries in the resulting dictionary @@ -144,7 +154,7 @@ def op_variant_key_add(v1: dict, v2: dict): if additional_zip_keys: for chunk in result.get("zip_keys", []): - zip_keyset = set(chunk) + zip_keyset: Union[frozenset, set] = set(chunk) if primary_key in zip_keyset: # The primary is already part of some zip_key, add the additional keys for additional_key in additional_zip_keys: @@ -221,7 +231,9 @@ def op_variant_key_add(v1: dict, v2: dict): return result -def op_variant_key_remove(v1: dict, v2: dict): +def op_variant_key_remove( + v1: dict, v2: dict +) -> Dict[str, Union[float, List[str], List[List[str]]]]: """Inverse of op_variant_key_add Will remove a given value from the field identified by primary_key and associated @@ -268,7 +280,7 @@ def 
op_variant_key_remove(v1: dict, v2: dict): } -def variant_add(v1: dict, v2: dict) -> Dict[str, Any]: +def variant_add(v1, v2): """Adds the two variants together. Present this assumes mostly flat dictionaries. @@ -295,7 +307,7 @@ def variant_add(v1: dict, v2: dict) -> Dict[str, Any]: right.remove("__migrator") # special keys in joint - special_variants = {} + special_variants: dict = {} if "pin_run_as_build" in joint: # For run_as_build we enforce the migrator's pin # TODO: should this just be a normal ordering merge, favoring more exact pins? @@ -311,7 +323,7 @@ def variant_add(v1: dict, v2: dict) -> Dict[str, Any]: # That does require changes to conda-build itself though # # A zip_keys block is deemed mergeable if zkₛ,ᵢ ⊂ zkₘ,ᵢ - zk_out = [] + zk_out: List = [] zk_l = {frozenset(e) for e in v1["zip_keys"]} zk_r = {frozenset(e) for e in v2["zip_keys"]} From 2e12bf8a0ffe6d5493669fc832ee1a32cc034a37 Mon Sep 17 00:00:00 2001 From: Ninette Adhikari Date: Thu, 20 Jun 2024 10:32:55 -0400 Subject: [PATCH 02/14] feat: Add type for tests --- tests/conftest.py | 20 ++- tests/test_anaconda_token_rotation.py | 74 ++++----- tests/test_ci_skeleton.py | 5 +- tests/test_cli.py | 29 ++-- tests/test_condaforge_config_schema.py | 4 + tests/test_configure_feedstock.py | 57 +++++-- tests/test_feedstock_io.py | 9 +- tests/test_feedstock_tokens.py | 200 +++++++++++++------------ tests/test_lint_recipe.py | 26 +++- tests/test_variant_algebra.py | 2 +- 10 files changed, 253 insertions(+), 173 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index d16c0a6b6..b11624300 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -13,6 +13,8 @@ _load_forge_config, conda_forge_content, ) +from _pytest._py.path import LocalPath +from _pytest.fixtures import SubRequest RecipeConfigPair = collections.namedtuple( "RecipeConfigPair", ("recipe", "config") @@ -26,7 +28,7 @@ class ConfigYAML(typing.NamedTuple): @pytest.fixture(scope="function") -def testing_workdir(tmpdir, request): +def 
testing_workdir(tmpdir: LocalPath, request: SubRequest) -> str: """Create a workdir in a safe temporary folder; cd into dir above before test, cd out after :param tmpdir: py.test fixture, will be injected @@ -59,8 +61,8 @@ def recipe_dirname(): @pytest.fixture(scope="function", params=["conda-build", "rattler-build"]) -def config_yaml(testing_workdir, recipe_dirname, request): - config = {"python": ["2.7", "3.5"], "r_base": ["3.3.2", "3.4.2"]} +def config_yaml(testing_workdir: str, recipe_dirname: str, request) -> str: + config: dict = {"python": ["2.7", "3.5"], "r_base": ["3.3.2", "3.4.2"]} os.makedirs(os.path.join(testing_workdir, recipe_dirname)) with open(os.path.join(testing_workdir, "config.yaml"), "w") as f: f.write("docker:\n") @@ -118,7 +120,7 @@ def config_yaml(testing_workdir, recipe_dirname, request): @pytest.fixture(scope="function") -def noarch_recipe(config_yaml: ConfigYAML, recipe_dirname): +def noarch_recipe(config_yaml: ConfigYAML, recipe_dirname: str) -> RecipeConfigPair: # get the used params passed for config_yaml fixture with open( os.path.join( @@ -256,7 +258,9 @@ def stdlib_recipe(config_yaml: ConfigYAML): @pytest.fixture(scope="function") -def stdlib_deployment_target_recipe(config_yaml: ConfigYAML, stdlib_recipe): +def stdlib_deployment_target_recipe( + config_yaml: ConfigYAML, stdlib_recipe: RecipeConfigPair +) -> RecipeConfigPair: # append to existing stdlib_config.yaml from stdlib_recipe with open( os.path.join(config_yaml.workdir, "recipe", "stdlib_config.yaml"), "a" @@ -364,8 +368,8 @@ def recipe_migration_cfep9(config_yaml: ConfigYAML): @pytest.fixture(scope="function") def recipe_migration_cfep9_downgrade( - config_yaml: ConfigYAML, recipe_migration_cfep9 -): + config_yaml: ConfigYAML, recipe_migration_cfep9: RecipeConfigPair +)-> RecipeConfigPair: # write a downgrade migrator that lives next to the current migrator. # Only this, more recent migrator should apply. 
os.makedirs( @@ -402,7 +406,7 @@ def recipe_migration_cfep9_downgrade( @pytest.fixture(scope="function") -def recipe_migration_win_compiled(config_yaml: ConfigYAML, py_recipe): +def recipe_migration_win_compiled(config_yaml: ConfigYAML, py_recipe: RecipeConfigPair) -> RecipeConfigPair: os.makedirs( os.path.join(config_yaml.workdir, ".ci_support", "migrations"), exist_ok=True, diff --git a/tests/test_anaconda_token_rotation.py b/tests/test_anaconda_token_rotation.py index 817d5caa1..ddc44ac56 100644 --- a/tests/test_anaconda_token_rotation.py +++ b/tests/test_anaconda_token_rotation.py @@ -4,6 +4,8 @@ from conda_smithy.anaconda_token_rotation import rotate_anaconda_token from conda_smithy.ci_register import drone_default_endpoint +from unittest.mock import MagicMock +from _pytest.monkeypatch import MonkeyPatch @pytest.mark.parametrize("appveyor", [True, False]) @@ -23,20 +25,20 @@ "conda_smithy.anaconda_token_rotation.rotate_token_in_github_actions" ) def test_rotate_anaconda_token( - github_actions_mock, - azure_mock, - travis_mock, - circle_mock, - drone_mock, - appveyor_mock, - get_ac_token, - get_gh_token, - appveyor, - drone, - circle, - azure, - travis, - github_actions, + github_actions_mock: MagicMock, + azure_mock: MagicMock, + travis_mock: MagicMock, + circle_mock: MagicMock, + drone_mock: MagicMock, + appveyor_mock: MagicMock, + get_ac_token: MagicMock, + get_gh_token: MagicMock, + appveyor: bool, + drone: bool, + circle: bool, + azure: bool, + travis: bool, + github_actions: bool, ): user = "foo" project = "bar" @@ -131,19 +133,19 @@ def test_rotate_anaconda_token( "conda_smithy.anaconda_token_rotation.rotate_token_in_github_actions" ) def test_rotate_anaconda_token_notoken( - github_actions_mock, - azure_mock, - travis_mock, - circle_mock, - drone_mock, - appveyor_mock, - appveyor, - drone, - circle, - azure, - travis, - github_actions, - monkeypatch, + github_actions_mock: MagicMock, + azure_mock: MagicMock, + travis_mock: MagicMock, + circle_mock: 
MagicMock, + drone_mock: MagicMock, + appveyor_mock: MagicMock, + appveyor: bool, + drone: bool, + circle: bool, + azure: bool, + travis: bool, + github_actions: bool, + monkeypatch: MonkeyPatch, ): user = "foo" project = "bar" @@ -187,15 +189,15 @@ def test_rotate_anaconda_token_notoken( "conda_smithy.anaconda_token_rotation.rotate_token_in_github_actions" ) def test_rotate_anaconda_token_provider_error( - github_actions_mock, - azure_mock, - travis_mock, - circle_mock, - drone_mock, - appveyor_mock, - get_ac_token, - get_gh_token, - provider, + github_actions_mock: MagicMock, + azure_mock: MagicMock, + travis_mock: MagicMock, + circle_mock: MagicMock, + drone_mock: MagicMock, + appveyor_mock: MagicMock, + get_ac_token: MagicMock, + get_gh_token: MagicMock, + provider: str, ): user = "foo" project = "bar" diff --git a/tests/test_ci_skeleton.py b/tests/test_ci_skeleton.py index 9cc957223..f97e1851f 100644 --- a/tests/test_ci_skeleton.py +++ b/tests/test_ci_skeleton.py @@ -1,3 +1,6 @@ +import pytest +from _pytest._py.path import LocalPath + from conda_smithy.ci_skeleton import generate CONDA_FORGE_YML = """clone_depth: 0 @@ -92,7 +95,7 @@ """ -def test_generate(tmpdir): +def test_generate(tmpdir: LocalPath): generate( package_name="my-package", feedstock_directory=str(tmpdir), diff --git a/tests/test_cli.py b/tests/test_cli.py index e51027a4d..8b4b8257a 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -7,18 +7,20 @@ import pytest import yaml +from typing import Union from conda_smithy import cli +from conftest import RecipeConfigPair _thisdir = os.path.abspath(os.path.dirname(__file__)) InitArgs = collections.namedtuple( - "ArgsObject", + "InitArgs", ("recipe_directory", "feedstock_directory", "temporary_directory"), ) RegenerateArgs = collections.namedtuple( - "ArgsObject", + "RegenerateArgs", ( "commit", "feedstock_directory", @@ -31,7 +33,7 @@ ) -def test_init(py_recipe): +def test_init(py_recipe: RecipeConfigPair): """This is the command that takes the 
initial staged-recipe folder and turns it into a feedstock""" # actual parser doesn't matter. It's used for initialization only @@ -50,7 +52,7 @@ def test_init(py_recipe): assert os.path.isdir(destination) -def test_init_with_custom_config(py_recipe): +def test_init_with_custom_config(py_recipe: RecipeConfigPair): """This is the command that takes the initial staged-recipe folder and turns it into a feedstock""" # actual parser doesn't matter. It's used for initialization only @@ -87,7 +89,7 @@ def test_init_with_custom_config(py_recipe): assert data["bot"]["run_deps_from_wheel"] is True -def test_init_multiple_output_matrix(testing_workdir): +def test_init_multiple_output_matrix(testing_workdir: str): parser = argparse.ArgumentParser() subparser = parser.add_subparsers() init_obj = cli.Init(subparser) @@ -96,6 +98,7 @@ def test_init_multiple_output_matrix(testing_workdir): feedstock_dir = os.path.join( testing_workdir, "multiple-outputs-test-feedstock" ) + args: Union[InitArgs, RegenerateArgs] args = InitArgs( recipe_directory=recipe, feedstock_directory=feedstock_dir, @@ -143,7 +146,9 @@ def test_init_multiple_output_matrix(testing_workdir): @pytest.mark.parametrize( "dirname", ["multiple_outputs", "multiple_outputs2", "multiple_outputs3"] ) -def test_render_readme_with_multiple_outputs(testing_workdir, dirname): +def test_render_readme_with_multiple_outputs( + testing_workdir: str, dirname: str +): parser = argparse.ArgumentParser() subparser = parser.add_subparsers() init_obj = cli.Init(subparser) @@ -153,6 +158,7 @@ def test_render_readme_with_multiple_outputs(testing_workdir, dirname): feedstock_dir = os.path.join( testing_workdir, "multiple-outputs-test-feedstock" ) + args: Union[InitArgs, RegenerateArgs] args = InitArgs( recipe_directory=recipe, feedstock_directory=feedstock_dir, @@ -200,7 +206,7 @@ def test_render_readme_with_multiple_outputs(testing_workdir, dirname): assert False -def test_init_cuda_docker_images(testing_workdir): +def 
test_init_cuda_docker_images(testing_workdir: str): parser = argparse.ArgumentParser() subparser = parser.add_subparsers() init_obj = cli.Init(subparser) @@ -209,6 +215,7 @@ def test_init_cuda_docker_images(testing_workdir): feedstock_dir = os.path.join( testing_workdir, "cuda_docker_images-feedstock" ) + args: Union[InitArgs, RegenerateArgs] args = InitArgs( recipe_directory=recipe, feedstock_directory=feedstock_dir, @@ -254,7 +261,7 @@ def test_init_cuda_docker_images(testing_workdir): assert config["cdt_name"] == ["cos6"] -def test_init_multiple_docker_images(testing_workdir): +def test_init_multiple_docker_images(testing_workdir: str): parser = argparse.ArgumentParser() subparser = parser.add_subparsers() init_obj = cli.Init(subparser) @@ -263,6 +270,7 @@ def test_init_multiple_docker_images(testing_workdir): feedstock_dir = os.path.join( testing_workdir, "multiple_docker_images-feedstock" ) + args: Union[InitArgs, RegenerateArgs] args = InitArgs( recipe_directory=recipe, feedstock_directory=feedstock_dir, @@ -296,7 +304,7 @@ def test_init_multiple_docker_images(testing_workdir): assert config["cdt_name"] == ["pickme_1"] -def test_regenerate(py_recipe, testing_workdir): +def test_regenerate(py_recipe: RecipeConfigPair, testing_workdir: str): parser = argparse.ArgumentParser() subparser = parser.add_subparsers() regen_obj = cli.Regenerate(subparser) @@ -330,7 +338,7 @@ def test_regenerate(py_recipe, testing_workdir): assert len(os.listdir(matrix_folder)) == 4 -def test_render_variant_mismatches(testing_workdir): +def test_render_variant_mismatches(testing_workdir: str): parser = argparse.ArgumentParser() subparser = parser.add_subparsers() init_obj = cli.Init(subparser) @@ -340,6 +348,7 @@ def test_render_variant_mismatches(testing_workdir): feedstock_dir = os.path.join( testing_workdir, "test-variant-mismatches-feedstock" ) + args: Union[InitArgs, RegenerateArgs] args = InitArgs( recipe_directory=recipe, feedstock_directory=feedstock_dir, diff --git 
a/tests/test_condaforge_config_schema.py b/tests/test_condaforge_config_schema.py index b4e08b586..c21ec40e8 100644 --- a/tests/test_condaforge_config_schema.py +++ b/tests/test_condaforge_config_schema.py @@ -2,6 +2,10 @@ from pydantic import ValidationError from conda_smithy.schema import ConfigModel +from typing import ( + Dict, + Union, +) # Sample config files SAMPLE_CONFIGS = [ diff --git a/tests/test_configure_feedstock.py b/tests/test_configure_feedstock.py index 8fef3d1f6..ad1a06020 100644 --- a/tests/test_configure_feedstock.py +++ b/tests/test_configure_feedstock.py @@ -10,6 +10,17 @@ import pytest import yaml from conftest import ConfigYAML +from conftest import RecipeConfigPair +from _pytest.logging import LogCaptureFixture +from jinja2.sandbox import SandboxedEnvironment +from typing import ( + Any, + Dict, + List, + Set, + Tuple, + Union, +) from conda_smithy import configure_feedstock from conda_smithy.configure_feedstock import _read_forge_config @@ -382,7 +393,9 @@ def test_upload_on_branch_appveyor(upload_on_branch_recipe, jinja_env): assert "UPLOAD_ON_BRANCH=foo-branch" in content["deploy_script"][-2] -def test_circle_with_yum_reqs(py_recipe, jinja_env): +def test_circle_with_yum_reqs( + py_recipe: RecipeConfigPair, jinja_env: SandboxedEnvironment +): with open( os.path.join(py_recipe.recipe, "recipe", "yum_requirements.txt"), "w" ) as f: @@ -395,7 +408,9 @@ def test_circle_with_yum_reqs(py_recipe, jinja_env): @pytest.mark.legacy_circle -def test_circle_with_empty_yum_reqs_raises(py_recipe, jinja_env): +def test_circle_with_empty_yum_reqs_raises( + py_recipe: RecipeConfigPair, jinja_env: SandboxedEnvironment +): py_recipe.config["provider"]["linux"] = "circle" with open( @@ -410,7 +425,9 @@ def test_circle_with_empty_yum_reqs_raises(py_recipe, jinja_env): ) -def test_azure_with_empty_yum_reqs_raises(py_recipe, jinja_env): +def test_azure_with_empty_yum_reqs_raises( + py_recipe: RecipeConfigPair, jinja_env: SandboxedEnvironment +): with open( 
         os.path.join(py_recipe.recipe, "recipe", "yum_requirements.txt"), "w"
     ) as f:
@@ -425,7 +442,9 @@ def test_azure_with_empty_yum_reqs_raises(py_recipe, jinja_env):
 
 @pytest.mark.legacy_circle
 @pytest.mark.legacy_travis
-def test_circle_osx(py_recipe, jinja_env):
+def test_circle_osx(
+    py_recipe: RecipeConfigPair, jinja_env: SandboxedEnvironment
+):
     # Set legacy providers
     py_recipe.config["provider"]["osx"] = "travis"
     py_recipe.config["provider"]["linux"] = "circle"
@@ -476,7 +495,9 @@ def test_circle_osx(py_recipe, jinja_env):
     assert os.path.exists(circle_config_file)
 
 
-def test_circle_skipped(linux_skipped_recipe, jinja_env):
+def test_circle_skipped(
+    linux_skipped_recipe: RecipeConfigPair, jinja_env: SandboxedEnvironment
+):
     forge_dir = linux_skipped_recipe.recipe
     circle_osx_file = os.path.join(forge_dir, ".scripts", "run_osx_build.sh")
     circle_linux_file = os.path.join(
@@ -747,8 +768,10 @@ def test_migrator_compiler_version_recipe(
     assert "win_64_c_compilervs2017python3.5.yaml" in rendered_variants
 
 
-def test_files_skip_render(render_skipped_recipe, jinja_env):
-    configure_feedstock.render_readme(
+def test_files_skip_render(
+    render_skipped_recipe: RecipeConfigPair, jinja_env: SandboxedEnvironment
+):
+    configure_feedstock.render_readme(
         jinja_env=jinja_env,
         forge_config=render_skipped_recipe.config,
         forge_dir=render_skipped_recipe.recipe,
@@ -768,7 +791,9 @@ def test_files_skip_render(render_skipped_recipe, jinja_env):
     assert not os.path.exists(fpath)
 
 
-def test_choco_install(choco_recipe, jinja_env):
+def test_choco_install(
+    choco_recipe: RecipeConfigPair, jinja_env: SandboxedEnvironment
+):
     configure_feedstock.render_azure(
         jinja_env=jinja_env,
         forge_config=choco_recipe.config,
@@ -811,7 +836,9 @@ def test_webservices_action_exists(py_recipe, jinja_env):
     assert "webservices" in action_config["jobs"]
 
 
-def test_automerge_action_exists(py_recipe, jinja_env):
+def test_automerge_action_exists(
+    py_recipe: RecipeConfigPair, jinja_env: SandboxedEnvironment
+):
configure_feedstock.render_github_actions_services( jinja_env=jinja_env, forge_config=py_recipe.config, @@ -890,7 +917,9 @@ def load_forge_config(forge_yml): assert load_forge_config(forge_yml_alt)["recipe_dir"] == "recipe" -def test_cos7_env_render(py_recipe, jinja_env): +def test_cos7_env_render( + py_recipe: RecipeConfigPair, jinja_env: SandboxedEnvironment +): forge_config = copy.deepcopy(py_recipe.config) forge_config["os_version"] = {"linux_64": "cos7"} has_env = "DEFAULT_LINUX_VERSION" in os.environ @@ -922,7 +951,9 @@ def test_cos7_env_render(py_recipe, jinja_env): del os.environ["DEFAULT_LINUX_VERSION"] -def test_cuda_enabled_render(cuda_enabled_recipe, jinja_env): +def test_cuda_enabled_render( + cuda_enabled_recipe: RecipeConfigPair, jinja_env: SandboxedEnvironment +): forge_config = copy.deepcopy(cuda_enabled_recipe.config) has_env = "CF_CUDA_ENABLED" in os.environ if has_env: @@ -953,7 +984,7 @@ def test_cuda_enabled_render(cuda_enabled_recipe, jinja_env): del os.environ["CF_CUDA_ENABLED"] -def test_conda_build_tools(config_yaml: ConfigYAML, caplog): +def test_conda_build_tools(config_yaml: ConfigYAML, caplog: LogCaptureFixture): load_forge_config = lambda: configure_feedstock._load_forge_config( # noqa config_yaml.workdir, exclusive_config_file=os.path.join( @@ -1981,7 +2012,7 @@ def test_get_used_key_values_by_input_order( assert used_key_values == expected_used_key_values -def test_conda_build_api_render_for_smithy(testing_workdir): +def test_conda_build_api_render_for_smithy(testing_workdir: str): import conda_build.api _thisdir = os.path.abspath(os.path.dirname(__file__)) diff --git a/tests/test_feedstock_io.py b/tests/test_feedstock_io.py index f471b6929..3aefb9c86 100644 --- a/tests/test_feedstock_io.py +++ b/tests/test_feedstock_io.py @@ -10,17 +10,22 @@ import git from git.index.typ import BlobFilter +from typing import ( + Iterator, +) import conda_smithy.feedstock_io as fio -def keep_dir(dirname): +def keep_dir(dirname: str): keep_filename = 
os.path.join(dirname, ".keep") with open(keep_filename, "w", encoding="utf-8", newline="\n") as fh: fh.write("") -def parameterize(): +def parameterize() -> ( + Iterator[tuple] +): for pathfunc in [ lambda pth, tmp_dir: os.path.relpath(pth, tmp_dir), lambda pth, tmp_dir: pth, diff --git a/tests/test_feedstock_tokens.py b/tests/test_feedstock_tokens.py index 888c1e95b..714791491 100644 --- a/tests/test_feedstock_tokens.py +++ b/tests/test_feedstock_tokens.py @@ -5,6 +5,9 @@ import pytest import scrypt +from _pytest._py.path import LocalPath +from typing import Optional, Any +from unittest.mock import MagicMock from conda_smithy.ci_register import drone_default_endpoint from conda_smithy.feedstock_tokens import ( @@ -52,17 +55,17 @@ @mock.patch("conda_smithy.feedstock_tokens.git") @mock.patch("conda_smithy.github.gh_token") def test_feedstock_tokens_roundtrip( - gh_mock, - git_mock, - tmp_mock, - tmpdir, - repo, - project, - provider, - ci, - retval_ci, - expires_at, - retval_time, + gh_mock: MagicMock, + git_mock: MagicMock, + tmp_mock: MagicMock, + tmpdir: LocalPath, + repo: str, + project: str, + provider: Optional[str], + ci: Optional[str], + retval_ci: bool, + expires_at: float, + retval_time: bool, ): gh_mock.return_value = "abc123" tmp_mock.TemporaryDirectory.return_value.__enter__.return_value = str( @@ -118,13 +121,13 @@ def test_feedstock_tokens_roundtrip( @mock.patch("conda_smithy.feedstock_tokens.git") @mock.patch("conda_smithy.github.gh_token") def test_is_valid_feedstock_token_nofile( - gh_mock, - git_mock, - tmp_mock, - tmpdir, - repo, - project, - ci, + gh_mock: MagicMock, + git_mock: MagicMock, + tmp_mock: MagicMock, + tmpdir: LocalPath, + repo: str, + project: str, + ci: Optional[str], ): gh_mock.return_value = "abc123" tmp_mock.TemporaryDirectory.return_value.__enter__.return_value = str( @@ -161,15 +164,15 @@ def test_is_valid_feedstock_token_nofile( @mock.patch("conda_smithy.feedstock_tokens.git") @mock.patch("conda_smithy.github.gh_token") def 
test_is_valid_feedstock_token_badtoken( - gh_mock, - git_mock, - tmp_mock, - tmpdir, - repo, - project, - expires_at, - provider, - ci, + gh_mock: MagicMock, + git_mock: MagicMock, + tmp_mock: MagicMock, + tmpdir: LocalPath, + repo: str, + project: str, + expires_at: str, + provider: Optional[str], + ci: Optional[str], ): gh_mock.return_value = "abc123" tmp_mock.TemporaryDirectory.return_value.__enter__.return_value = str( @@ -196,7 +199,7 @@ def test_is_valid_feedstock_token_badtoken( @pytest.mark.parametrize("ci", [None, "azure"]) -def test_generate_and_write_feedstock_token(ci): +def test_generate_and_write_feedstock_token(ci: Optional[str]): user = "bar" repo = "foo" @@ -229,7 +232,7 @@ def test_generate_and_write_feedstock_token(ci): @pytest.mark.parametrize("ci", [None, "azure"]) -def test_read_feedstock_token(ci): +def test_read_feedstock_token(ci: Optional[str]): user = "bar" repo = "foo" if ci: @@ -238,6 +241,7 @@ def test_read_feedstock_token(ci): pth = os.path.expanduser("~/.conda-smithy/bar_foo.token") # no token + err: Any token, err = read_feedstock_token(user, repo, provider=ci) assert "No token found in" in err assert token is None @@ -304,18 +308,18 @@ def test_read_feedstock_token(ci): @mock.patch("conda_smithy.feedstock_tokens.git") @mock.patch("conda_smithy.github.gh_token") def test_feedstock_token_exists( - gh_mock, - git_mock, - tmp_mock, - tmpdir, - repo, - project, - file_exists, - ci, - provider, - retval_ci, - expires_at, - retval_time, + gh_mock: MagicMock, + git_mock: MagicMock, + tmp_mock: MagicMock, + tmpdir: LocalPath, + repo: str, + project: str, + file_exists: bool, + ci: Optional[str], + provider: Optional[str], + retval_ci: bool, + expires_at: float, + retval_time: bool, ): gh_mock.return_value = "abc123" tmp_mock.TemporaryDirectory.return_value.__enter__.return_value = str( @@ -328,7 +332,7 @@ def test_feedstock_token_exists( with open( os.path.join(tmpdir, "tokens", f"{project}.json"), "w" ) as fp: - data = {"tokens": [{}]} + 
data: dict = {"tokens": [{}]} if provider is not None: data["tokens"][0]["provider"] = provider if expires_at is not None: @@ -355,7 +359,13 @@ def test_feedstock_token_exists( @mock.patch("conda_smithy.feedstock_tokens.git") @mock.patch("conda_smithy.github.gh_token") def test_feedstock_token_raises( - gh_mock, git_mock, tmp_mock, tmpdir, repo, project, ci + gh_mock: MagicMock, + git_mock: MagicMock, + tmp_mock: MagicMock, + tmpdir: LocalPath, + repo: str, + project: str, + ci: Optional[str], ): gh_mock.return_value = "abc123" tmp_mock.TemporaryDirectory.return_value.__enter__.return_value = str( @@ -390,14 +400,14 @@ def test_feedstock_token_raises( @mock.patch("conda_smithy.feedstock_tokens.git") @mock.patch("conda_smithy.github.gh_token") def test_register_feedstock_token_works( - gh_mock, - git_mock, - tmp_mock, - osuran_mock, - secrets_mock, - tmpdir, + gh_mock: MagicMock, + git_mock: MagicMock, + tmp_mock: MagicMock, + osuran_mock: MagicMock, + secrets_mock: MagicMock, + tmpdir: LocalPath, repo, - ci, + ci: Optional[str], ): gh_mock.return_value = "abc123" tmp_mock.TemporaryDirectory.return_value.__enter__.return_value = str( @@ -463,14 +473,14 @@ def test_register_feedstock_token_works( @mock.patch("conda_smithy.feedstock_tokens.git") @mock.patch("conda_smithy.github.gh_token") def test_register_feedstock_token_notoken( - gh_mock, - git_mock, - tmp_mock, - osuran_mock, - secrets_mock, - tmpdir, + gh_mock: MagicMock, + git_mock: MagicMock, + tmp_mock: MagicMock, + osuran_mock: MagicMock, + secrets_mock: MagicMock, + tmpdir: LocalPath, repo, - ci, + ci: Optional[str], ): gh_mock.return_value = "abc123" tmp_mock.TemporaryDirectory.return_value.__enter__.return_value = str( @@ -519,14 +529,14 @@ def test_register_feedstock_token_notoken( @mock.patch("conda_smithy.feedstock_tokens.git") @mock.patch("conda_smithy.github.gh_token") def test_register_feedstock_token_append( - gh_mock, - git_mock, - tmp_mock, - osuran_mock, - secrets_mock, - tmpdir, + gh_mock: 
MagicMock, + git_mock: MagicMock, + tmp_mock: MagicMock, + osuran_mock: MagicMock, + secrets_mock: MagicMock, + tmpdir: LocalPath, repo, - ci, + ci: Optional[str], ): gh_mock.return_value = "abc123" tmp_mock.TemporaryDirectory.return_value.__enter__.return_value = str( @@ -597,18 +607,18 @@ def test_register_feedstock_token_append( "conda_smithy.feedstock_tokens.add_feedstock_token_to_github_actions" ) def test_register_feedstock_token_with_providers( - github_actions_mock, - azure_mock, - travis_mock, - circle_mock, - drone_mock, - drone, - circle, - azure, - travis, - github_actions, - clobber, - unique_token_per_provider, + github_actions_mock: MagicMock, + azure_mock: MagicMock, + travis_mock: MagicMock, + circle_mock: MagicMock, + drone_mock: MagicMock, + drone: bool, + circle: bool, + azure: bool, + travis: bool, + github_actions: bool, + clobber: bool, + unique_token_per_provider: bool, ): user = "foo" project = "bar" @@ -735,18 +745,18 @@ def test_register_feedstock_token_with_providers( "conda_smithy.feedstock_tokens.add_feedstock_token_to_github_actions" ) def test_register_feedstock_token_with_providers_notoken( - github_actions_mock, - azure_mock, - travis_mock, - circle_mock, - drone_mock, - drone, - circle, - azure, - travis, - github_actions, - clobber, - unique_token_per_provider, + github_actions_mock: MagicMock, + azure_mock: MagicMock, + travis_mock: MagicMock, + circle_mock: MagicMock, + drone_mock: MagicMock, + drone: bool, + circle: bool, + azure: bool, + travis: bool, + github_actions: bool, + clobber: bool, + unique_token_per_provider: bool, ): user = "foo" project = "bar" @@ -786,13 +796,13 @@ def test_register_feedstock_token_with_providers_notoken( "conda_smithy.feedstock_tokens.add_feedstock_token_to_github_actions" ) def test_register_feedstock_token_with_providers_error( - github_actions_mock, - azure_mock, - travis_mock, - circle_mock, - drone_mock, - provider, - unique_token_per_provider, + github_actions_mock: MagicMock, + 
azure_mock: MagicMock, + travis_mock: MagicMock, + circle_mock: MagicMock, + drone_mock: MagicMock, + provider: str, + unique_token_per_provider: bool, ): user = "foo" project = "bar-feedstock" diff --git a/tests/test_lint_recipe.py b/tests/test_lint_recipe.py index d2515db1c..2f7f603d5 100644 --- a/tests/test_lint_recipe.py +++ b/tests/test_lint_recipe.py @@ -12,6 +12,12 @@ import github import pytest +from typing import ( + Iterator, + List, + Optional, + Tuple, +) import conda_smithy.lint_recipe as linter from conda_smithy.linter.utils import VALID_PYTHON_BUILD_BACKENDS @@ -20,7 +26,7 @@ _thisdir = os.path.abspath(os.path.dirname(__file__)) -def is_gh_token_set(): +def is_gh_token_set() -> bool: return "GH_TOKEN" in os.environ @@ -44,7 +50,7 @@ def get_recipe_in_dir(recipe_name: str) -> Path: @contextmanager -def tmp_directory(): +def tmp_directory() -> Iterator[str]: tmp_dir = tempfile.mkdtemp("recipe_") yield tmp_dir shutil.rmtree(tmp_dir) @@ -54,7 +60,7 @@ def tmp_directory(): "comp_lang", ["c", "cxx", "fortran", "rust", "m2w64_c", "m2w64_cxx", "m2w64_fortran"], ) -def test_stdlib_lint(comp_lang): +def test_stdlib_hint(comp_lang: str): expected_message = "This recipe is using a compiler" with tmp_directory() as recipe_dir: @@ -122,7 +128,7 @@ def test_sysroot_lint(): @pytest.mark.parametrize("where", ["run", "run_constrained"]) -def test_osx_lint(where): +def test_osx_hint(where: str): expected_message = "You're setting a constraint on the `__osx` virtual" with tmp_directory() as recipe_dir: @@ -188,7 +194,7 @@ def test_stdlib_lints_multi_output(): @pytest.mark.parametrize("where", ["run", "run_constrained"]) -def test_osx_noarch_hint(where): +def test_osx_noarch_hint(where: str): # don't warn on packages that are using __osx as a noarch-marker, see # https://conda-forge.org/docs/maintainer/knowledge_base/#noarch-packages-with-os-specific-dependencies avoid_message = "You're setting a constraint on the `__osx` virtual" @@ -287,8 +293,14 @@ def 
test_recipe_v1_osx_noarch_hint(): (None, None, ["10.12", "11.0"], "You are"), ], ) -def test_cbc_osx_lints( - std_selector, with_linux, reverse_arch, macdt, v_std, sdk, exp_lint +def test_cbc_osx_hints( + std_selector, + with_linux: bool, + reverse_arch: Tuple[bool, bool, bool], + macdt: Optional[List[str]], + v_std: Optional[List[str]], + sdk: Optional[List[str]], + exp_hint: Optional[str], ): with tmp_directory() as rdir: with open(os.path.join(rdir, "meta.yaml"), "w") as fh: diff --git a/tests/test_variant_algebra.py b/tests/test_variant_algebra.py index 352ae5fa2..802500983 100644 --- a/tests/test_variant_algebra.py +++ b/tests/test_variant_algebra.py @@ -584,7 +584,7 @@ def test_variant_key_remove(): @pytest.mark.parametrize( "platform,arch", [["osx", "64"], ["osx", "arm64"], ["linux", "64"]] ) -def test_variant_remove_add(platform, arch): +def test_variant_remove_add(platform: str, arch: str): from conda_build.config import Config config = Config(platform=platform, arch=arch) From 9306cc562d5e402ecd5c0ad4671c53cf087e45c3 Mon Sep 17 00:00:00 2001 From: Ninette Adhikari Date: Fri, 7 Jun 2024 11:53:52 -0400 Subject: [PATCH 03/14] ci: Add mypy to workflow and environment Add mypy to GitHub workflow and to the environment yml --- .github/workflows/tests.yml | 4 ++++ environment.yml | 1 + 2 files changed, 5 insertions(+) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 342ac1215..bf4bcfbf2 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -66,6 +66,10 @@ jobs: env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: run type check + run: | + mypy conda_smithy tests + - name: coveralls uses: coverallsapp/github-action@master with: diff --git a/environment.yml b/environment.yml index 2c14cd23c..80042ada8 100644 --- a/environment.yml +++ b/environment.yml @@ -37,3 +37,4 @@ dependencies: - backports.strenum - exceptiongroup - rattler-build-conda-compat >=1.2.0,<2.0.0a0 + - mypy From 
e4c183ffebcb66363625dda2f7eab4544cc3ce75 Mon Sep 17 00:00:00 2001 From: Alex Feyerke Date: Wed, 19 Jun 2024 12:55:38 +0200 Subject: [PATCH 04/14] chore: install missing type dependencies --- environment.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/environment.yml b/environment.yml index 80042ada8..34d8b6f10 100644 --- a/environment.yml +++ b/environment.yml @@ -14,6 +14,10 @@ dependencies: - mock - pytest - pytest-cov + - types-jsonschema + - types-requests + - types-PyYAML + - types-simplejson # Runtime dependencies - conda >=4.2 - conda-build >=24.3 From e967560ac48a51a87dac126f15450783dbf23c66 Mon Sep 17 00:00:00 2001 From: Alex Feyerke Date: Wed, 19 Jun 2024 12:56:04 +0200 Subject: [PATCH 05/14] chore: ignore remaining missing imports in mypy --- pyproject.toml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 23cd8a77a..02184244c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -72,3 +72,17 @@ markers = [ "legacy_appveyor: Test designed to run as if prior to the azure migration", "cli: CLI tests outside of test/test_cli.py", ] + +[[tool.mypy.overrides]] +module = [ + # Conda build does not provide types: https://github.com/conda/conda-build/pull/4878#issuecomment-1543416104 + 'conda_build.*', + # `vsts` (conda) aka. `vsts-python-api` (pypi) seems to have been deprecated (https://github.com/microsoft/azure-devops-python-api/issues/440), and the successor library doesn’t seem to be a drop-in replacement. 
+ 'vsts.*', + # The following are rarely used in this project and might benefit from having custom stubs written for them, see https://mypy.readthedocs.io/en/stable/stubs.html#stub-files + 'license_expression', + 'cirun', + 'scrypt', + 'toolz' +] +ignore_missing_imports = true From 7348daf5cc8c8a7f52cf543b9865076d16534f39 Mon Sep 17 00:00:00 2001 From: Ninette Adhikari Date: Wed, 19 Jun 2024 15:41:46 -0400 Subject: [PATCH 06/14] fix: Add type ignore Add mypy type ignore for some tests and conda smithy files, which couldn't be resolved. --- conda_smithy/configure_feedstock.py | 2 +- conda_smithy/schema.py | 14 +++++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/conda_smithy/configure_feedstock.py b/conda_smithy/configure_feedstock.py index d4fd1a5b2..25e53c844 100644 --- a/conda_smithy/configure_feedstock.py +++ b/conda_smithy/configure_feedstock.py @@ -2280,7 +2280,7 @@ def render_README( "Azure build_id can't be retrieved using the Azure token. Exception: %s", err, ) - except json.decoder.JSONDecodeError: + except json.decoder.JSONDecodeError: # type: ignore azure_build_id_from_token(forge_config) logger.debug("README") diff --git a/conda_smithy/schema.py b/conda_smithy/schema.py index a3d8472be..d945c3673 100644 --- a/conda_smithy/schema.py +++ b/conda_smithy/schema.py @@ -11,7 +11,7 @@ from pydantic import BaseModel, ConfigDict, Field, create_model try: - from enum import StrEnum + from enum import StrEnum # type: ignore except ImportError: from backports.strenum import StrEnum @@ -411,7 +411,7 @@ class BotConfig(BaseModel): ) version_updates: Optional[BotConfigVersionUpdates] = Field( - default_factory=BotConfigVersionUpdates, + default_factory=BotConfigVersionUpdates, # type: ignore description="Bot config for version update PRs", ) @@ -636,7 +636,7 @@ class ConfigModel(BaseModel): ) bot: Optional[BotConfig] = Field( - default_factory=BotConfig, + default_factory=BotConfig, # type: ignore description=cleandoc( """ This dictates the 
behavior of the conda-forge auto-tick bot which issues @@ -684,7 +684,7 @@ class ConfigModel(BaseModel): ), ) - build_platform: Optional[BuildPlatform] = Field( + build_platform: Optional[BuildPlatform] = Field( # type: ignore default_factory=BuildPlatform, description=cleandoc( """ @@ -778,7 +778,7 @@ class ConfigModel(BaseModel): ), ) - noarch_platforms: Optional[Union[Platforms, List[Platforms]]] = Field( + noarch_platforms: Optional[Union[Platforms, List[Platforms]]] = Field( # type: ignore default_factory=lambda: ["linux_64"], description=cleandoc( """ @@ -801,7 +801,7 @@ class ConfigModel(BaseModel): ), ) - os_version: Optional[OSVersion] = Field( + os_version: Optional[OSVersion] = Field( # type: ignore default_factory=OSVersion, description=cleandoc( """ @@ -819,7 +819,7 @@ class ConfigModel(BaseModel): ), ) - provider: Optional[Provider] = Field( + provider: Optional[Provider] = Field( # type: ignore default_factory=Provider, description=cleandoc( """ From 899773b44bf04ea75f3ee1b3e414f3ea26a97404 Mon Sep 17 00:00:00 2001 From: Ninette Adhikari Date: Thu, 20 Jun 2024 11:09:45 -0400 Subject: [PATCH 07/14] chore: Reformat files with black and add news entry --- conda_smithy/anaconda_token_rotation.py | 6 +--- conda_smithy/configure_feedstock.py | 33 ++++++----------- conda_smithy/feedstock_io.py | 7 +--- conda_smithy/schema.py | 47 +++++++++++++------------ conda_smithy/validate_schema.py | 15 ++++---- conda_smithy/variant_algebra.py | 2 +- news/1957_add_typing.rst | 23 ++++++++++++ tests/test_feedstock_io.py | 4 +-- 8 files changed, 71 insertions(+), 66 deletions(-) create mode 100644 news/1957_add_typing.rst diff --git a/conda_smithy/anaconda_token_rotation.py b/conda_smithy/anaconda_token_rotation.py index 41f5c76b3..45dac4b53 100644 --- a/conda_smithy/anaconda_token_rotation.py +++ b/conda_smithy/anaconda_token_rotation.py @@ -13,11 +13,7 @@ import sys from contextlib import redirect_stderr, redirect_stdout from github import Github -from typing import ( 
- List, - Optional, - Union -) +from typing import List, Optional, Union import requests from github import Github diff --git a/conda_smithy/configure_feedstock.py b/conda_smithy/configure_feedstock.py index 25e53c844..e8ad9ce8e 100644 --- a/conda_smithy/configure_feedstock.py +++ b/conda_smithy/configure_feedstock.py @@ -47,7 +47,7 @@ try: import simplejson as json except ImportError: - import json # type: ignore + import json # type: ignore from conda.models.match_spec import MatchSpec from conda.models.version import VersionOrder @@ -125,10 +125,7 @@ def warn_once(msg: str): def package_key( - config: Dict[ - str, - Union[List[str], List[List[str]], OrderedDict] - ], + config: Dict[str, Union[List[str], List[List[str]], OrderedDict]], used_loop_vars: Set[str], subdir: str, ) -> str: @@ -196,9 +193,7 @@ def merge_list_of_dicts( return squished_dict -def argsort( - seq: List[tuple] -) -> List[int]: +def argsort(seq: List[tuple]) -> List[int]: return sorted(range(len(seq)), key=seq.__getitem__) @@ -364,9 +359,7 @@ def _trim_unused_zip_keys(all_used_vars): del all_used_vars["zip_keys"] -def _trim_unused_pin_run_as_build( - all_used_vars: dict -): +def _trim_unused_pin_run_as_build(all_used_vars: dict): """Remove unused keys in pin_run_as_build sets""" pkgs = all_used_vars.get("pin_run_as_build", {}) used_pkgs = {} @@ -388,10 +381,7 @@ def _get_used_key_values_by_input_order( Union[set, dict, list, tuple], ], ], - squished_used_variants: Union[ - OrderedDict, - dict - ], + squished_used_variants: Union[OrderedDict, dict], all_used_vars: Set[str], ) -> tuple: used_key_values = { @@ -728,9 +718,7 @@ def _collapse_subpackage_variants( ) -def _yaml_represent_ordereddict( - yaml_representer, data: OrderedDict -): +def _yaml_represent_ordereddict(yaml_representer, data: OrderedDict): # represent_dict processes dict-likes with a .sort() method or plain iterables of key-value # pairs. Only for the latter it never sorts and retains the order of the OrderedDict. 
return yaml.representer.SafeRepresenter.represent_dict( @@ -959,9 +947,10 @@ def migrate_combined_spec( def _conda_build_api_render_for_smithy( recipe_path: str, - config = None, - variants: Optional[Dict[str, Union[List[str], List[List[str]], Dict[str, Dict[str, str]]]]] - = None, + config=None, + variants: Optional[ + Dict[str, Union[List[str], List[List[str]], Dict[str, Dict[str, str]]]] + ] = None, permit_unsatisfiable_variants: bool = True, finalize: bool = True, bypass_env_check: bool = False, @@ -2280,7 +2269,7 @@ def render_README( "Azure build_id can't be retrieved using the Azure token. Exception: %s", err, ) - except json.decoder.JSONDecodeError: # type: ignore + except json.decoder.JSONDecodeError: # type: ignore azure_build_id_from_token(forge_config) logger.debug("README") diff --git a/conda_smithy/feedstock_io.py b/conda_smithy/feedstock_io.py index f0297a4af..7660bdac1 100644 --- a/conda_smithy/feedstock_io.py +++ b/conda_smithy/feedstock_io.py @@ -4,12 +4,7 @@ import shutil import stat from io import TextIOWrapper -from typing import ( - Iterator, - Optional, - Any, - Union -) +from typing import Iterator, Optional, Any, Union def get_repo( path: str, search_parent_directories: bool = True diff --git a/conda_smithy/schema.py b/conda_smithy/schema.py index d945c3673..66b3b6236 100644 --- a/conda_smithy/schema.py +++ b/conda_smithy/schema.py @@ -11,7 +11,7 @@ from pydantic import BaseModel, ConfigDict, Field, create_model try: - from enum import StrEnum # type: ignore + from enum import StrEnum # type: ignore except ImportError: from backports.strenum import StrEnum @@ -411,7 +411,7 @@ class BotConfig(BaseModel): ) version_updates: Optional[BotConfigVersionUpdates] = Field( - default_factory=BotConfigVersionUpdates, # type: ignore + default_factory=BotConfigVersionUpdates, # type: ignore description="Bot config for version update PRs", ) @@ -507,20 +507,21 @@ class DefaultTestPlatforms(StrEnum): native = "native" native_and_emulated = 
"native_and_emulated" + buildPlatform_fields: Dict[str, Any] = { - platform.value: (Optional[Platforms], Field(default=platform.value)) - for platform in Platforms - } + platform.value: (Optional[Platforms], Field(default=platform.value)) + for platform in Platforms +} BuildPlatform = create_model( "build_platform", **buildPlatform_fields, ) OSVersion_fields: Dict[str, Any] = { - platform.value: (Optional[Union[str, Nullable]], Field(default=None)) - for platform in Platforms - if platform.value.startswith("linux") - } + platform.value: (Optional[Union[str, Nullable]], Field(default=None)) + for platform in Platforms + if platform.value.startswith("linux") +} OSVersion = create_model( "os_version", **OSVersion_fields, @@ -529,15 +530,15 @@ class DefaultTestPlatforms(StrEnum): ProviderType = Union[List[CIservices], CIservices, bool, Nullable] provider_fields: Dict[str, Any] = dict( - [ - (str(plat), (Optional[ProviderType], Field(default=None))) - for plat in list(PlatformsAliases) + list(Platforms) - ] - + [ - (str(plat), (Optional[ProviderType], Field(default="azure"))) - for plat in ("linux_64", "osx_64", "win_64") - ] - ) + [ + (str(plat), (Optional[ProviderType], Field(default=None))) + for plat in list(PlatformsAliases) + list(Platforms) + ] + + [ + (str(plat), (Optional[ProviderType], Field(default="azure"))) + for plat in ("linux_64", "osx_64", "win_64") + ] +) Provider = create_model( "provider", **provider_fields, @@ -636,7 +637,7 @@ class ConfigModel(BaseModel): ) bot: Optional[BotConfig] = Field( - default_factory=BotConfig, # type: ignore + default_factory=BotConfig, # type: ignore description=cleandoc( """ This dictates the behavior of the conda-forge auto-tick bot which issues @@ -684,7 +685,7 @@ class ConfigModel(BaseModel): ), ) - build_platform: Optional[BuildPlatform] = Field( # type: ignore + build_platform: Optional[BuildPlatform] = Field( # type: ignore default_factory=BuildPlatform, description=cleandoc( """ @@ -778,7 +779,7 @@ class 
ConfigModel(BaseModel): ), ) - noarch_platforms: Optional[Union[Platforms, List[Platforms]]] = Field( # type: ignore + noarch_platforms: Optional[Union[Platforms, List[Platforms]]] = Field( # type: ignore default_factory=lambda: ["linux_64"], description=cleandoc( """ @@ -801,7 +802,7 @@ class ConfigModel(BaseModel): ), ) - os_version: Optional[OSVersion] = Field( # type: ignore + os_version: Optional[OSVersion] = Field( # type: ignore default_factory=OSVersion, description=cleandoc( """ @@ -819,7 +820,7 @@ class ConfigModel(BaseModel): ), ) - provider: Optional[Provider] = Field( # type: ignore + provider: Optional[Provider] = Field( # type: ignore default_factory=Provider, description=cleandoc( """ diff --git a/conda_smithy/validate_schema.py b/conda_smithy/validate_schema.py index f6bc5e041..0ca19e841 100644 --- a/conda_smithy/validate_schema.py +++ b/conda_smithy/validate_schema.py @@ -47,13 +47,16 @@ def get_validator_class(): def validate_json_schema( config: Union[ CommentedMap, - Dict[str, Union[ - bool, + Dict[ str, - List[str], - Dict[str, bool], - Dict[str, Dict[str, str]] - ]] + Union[ + bool, + str, + List[str], + Dict[str, bool], + Dict[str, Dict[str, str]], + ], + ], ], schema_file: Optional[Path], ) -> tuple: diff --git a/conda_smithy/variant_algebra.py b/conda_smithy/variant_algebra.py index 50a67a8d0..61f214804 100644 --- a/conda_smithy/variant_algebra.py +++ b/conda_smithy/variant_algebra.py @@ -77,7 +77,7 @@ def variant_key_add( v_left: Union[List[str], List[float]], v_right: Union[List[str], List[float]], ordering: Optional[List[str]] = None, -) -> List[Union[str,float]]: +) -> List[Union[str, float]]: """Version summation adder. This takes the higher version of the two things. 
diff --git a/news/1957_add_typing.rst b/news/1957_add_typing.rst new file mode 100644 index 000000000..40d3195e7 --- /dev/null +++ b/news/1957_add_typing.rst @@ -0,0 +1,23 @@ +**Added:** + +* + +**Changed:** + +* Added typing to conda_smithy and tests files + +**Deprecated:** + +* + +**Removed:** + +* + +**Fixed:** + +* + +**Security:** + +* diff --git a/tests/test_feedstock_io.py b/tests/test_feedstock_io.py index 3aefb9c86..c6da85fdd 100644 --- a/tests/test_feedstock_io.py +++ b/tests/test_feedstock_io.py @@ -23,9 +23,7 @@ def keep_dir(dirname: str): fh.write("") -def parameterize() -> ( - Iterator[tuple] -): +def parameterize() -> Iterator[tuple]: for pathfunc in [ lambda pth, tmp_dir: os.path.relpath(pth, tmp_dir), lambda pth, tmp_dir: pth, From ced7757e151651bb3de9e3b005a492183248ac36 Mon Sep 17 00:00:00 2001 From: Ninette Adhikari Date: Fri, 23 Aug 2024 11:47:09 +0200 Subject: [PATCH 08/14] chore: Reformat files with black --- conda_smithy/cli.py | 4 +++- conda_smithy/feedstock_io.py | 5 ++--- conda_smithy/lint_recipe.py | 5 ++++- tests/conftest.py | 10 +++++++--- 4 files changed, 16 insertions(+), 8 deletions(-) diff --git a/conda_smithy/cli.py b/conda_smithy/cli.py index 95d802aa3..cc156d7c3 100644 --- a/conda_smithy/cli.py +++ b/conda_smithy/cli.py @@ -33,7 +33,9 @@ def default_feedstock_config_path(feedstock_directory: str) -> str: def generate_feedstock_content( - target_directory: str, source_recipe_dir: str, conda_build_tool: Optional[str] = None + target_directory: str, + source_recipe_dir: str, + conda_build_tool: Optional[str] = None, ): target_directory = os.path.abspath(target_directory) recipe_dir = "recipe" diff --git a/conda_smithy/feedstock_io.py b/conda_smithy/feedstock_io.py index 7660bdac1..95d77f16f 100644 --- a/conda_smithy/feedstock_io.py +++ b/conda_smithy/feedstock_io.py @@ -6,9 +6,8 @@ from io import TextIOWrapper from typing import Iterator, Optional, Any, Union -def get_repo( - path: str, search_parent_directories: bool = True -): + 
+def get_repo(path: str, search_parent_directories: bool = True): repo = None try: import git diff --git a/conda_smithy/lint_recipe.py b/conda_smithy/lint_recipe.py index bea2ca1d9..c369ec808 100644 --- a/conda_smithy/lint_recipe.py +++ b/conda_smithy/lint_recipe.py @@ -649,7 +649,10 @@ def _format_validation_msg(error: jsonschema.ValidationError): def main( - recipe_dir: str, conda_forge: bool = False, return_hints: bool = False, feedstock_dir=None + recipe_dir: str, + conda_forge: bool = False, + return_hints: bool = False, + feedstock_dir=None, ) -> Union[ Tuple[List[str], List[str]], Tuple[List[str], List[Any]], diff --git a/tests/conftest.py b/tests/conftest.py index b11624300..e35cf7a53 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -120,7 +120,9 @@ def config_yaml(testing_workdir: str, recipe_dirname: str, request) -> str: @pytest.fixture(scope="function") -def noarch_recipe(config_yaml: ConfigYAML, recipe_dirname: str) -> RecipeConfigPair: +def noarch_recipe( + config_yaml: ConfigYAML, recipe_dirname: str +) -> RecipeConfigPair: # get the used params passed for config_yaml fixture with open( os.path.join( @@ -369,7 +371,7 @@ def recipe_migration_cfep9(config_yaml: ConfigYAML): @pytest.fixture(scope="function") def recipe_migration_cfep9_downgrade( config_yaml: ConfigYAML, recipe_migration_cfep9: RecipeConfigPair -)-> RecipeConfigPair: +) -> RecipeConfigPair: # write a downgrade migrator that lives next to the current migrator. # Only this, more recent migrator should apply. 
os.makedirs( @@ -406,7 +408,9 @@ def recipe_migration_cfep9_downgrade( @pytest.fixture(scope="function") -def recipe_migration_win_compiled(config_yaml: ConfigYAML, py_recipe: RecipeConfigPair) -> RecipeConfigPair: +def recipe_migration_win_compiled( + config_yaml: ConfigYAML, py_recipe: RecipeConfigPair +) -> RecipeConfigPair: os.makedirs( os.path.join(config_yaml.workdir, ".ci_support", "migrations"), exist_ok=True, From 958971a40f96d0649a30c8859a24cadcc8038423 Mon Sep 17 00:00:00 2001 From: Ninette Adhikari Date: Tue, 27 Aug 2024 12:13:38 +0200 Subject: [PATCH 09/14] chore: Update tests to rebase --- conda_smithy/configure_feedstock.py | 68 ++++++++++++++++++----------- tests/test_configure_feedstock.py | 2 +- tests/test_lint_recipe.py | 2 +- 3 files changed, 45 insertions(+), 27 deletions(-) diff --git a/conda_smithy/configure_feedstock.py b/conda_smithy/configure_feedstock.py index e8ad9ce8e..55fe0821b 100644 --- a/conda_smithy/configure_feedstock.py +++ b/conda_smithy/configure_feedstock.py @@ -3,13 +3,12 @@ import hashlib import logging import os +import pprint import re import subprocess import sys -import pprint import textwrap import time -import yaml import warnings from collections import Counter, OrderedDict, namedtuple from copy import deepcopy @@ -17,7 +16,9 @@ from itertools import chain, product from os import fspath from pathlib import Path, PurePath + import requests +import yaml # Imports for typing from conda_build.config import Config @@ -35,10 +36,6 @@ Union, ) -try: - from builtins import ExceptionGroup -except ImportError: - from exceptiongroup import ExceptionGroup # The `requests` lib uses `simplejson` instead of `json` when available. 
# In consequence the same JSON library must be used or the `JSONDecodeError` @@ -49,23 +46,22 @@ except ImportError: import json # type: ignore -from conda.models.match_spec import MatchSpec -from conda.models.version import VersionOrder -from conda.exceptions import InvalidVersionSpec - import conda_build.api import conda_build.render import conda_build.utils import conda_build.variants -import conda_build.conda_interface -import conda_build.render +from conda.exceptions import InvalidVersionSpec from conda.models.match_spec import MatchSpec +from conda.models.version import VersionOrder +from conda_build import __version__ as conda_build_version from conda_build.metadata import get_selectors +from jinja2 import FileSystemLoader +from jinja2.sandbox import SandboxedEnvironment +from rattler_build_conda_compat.loader import parse_recipe_config_file +from rattler_build_conda_compat.render import render as rattler_render -from copy import deepcopy -from conda_build import __version__ as conda_build_version -from jinja2 import Environment, FileSystemLoader +from conda_smithy import __version__ from conda_smithy.feedstock_io import ( copy_file, @@ -74,19 +70,16 @@ set_exe_file, write_file, ) -from conda_smithy.validate_schema import ( - validate_json_schema, - CONDA_FORGE_YAML_DEFAULTS_FILE, -) from conda_smithy.utils import ( + RATTLER_BUILD, + HashableDict, get_feedstock_about_from_meta, get_feedstock_name_from_meta, ) - -from . 
import __version__ -from .rattler_build.build import render as rattler_render -from .rattler_build.loader import parse_recipe_config_file -from .utils import RATTLER_BUILD +from conda_smithy.validate_schema import ( + CONDA_FORGE_YAML_DEFAULTS_FILE, + validate_json_schema, +) conda_forge_content = os.path.abspath(os.path.dirname(__file__)) @@ -1093,6 +1086,25 @@ def _render_ci_provider( os.path.join(forge_dir, forge_config["recipe_dir"]), config=config ) + # If we are using new recipe + # we also load v1 variants.yaml + if recipe_file == "recipe.yaml": + # get_selectors from conda-build return namespace + # so it is usefull to reuse it here + namespace = get_selectors(config) + variants_path = os.path.join( + forge_dir, forge_config["recipe_dir"], "variants.yaml" + ) + if os.path.exists(variants_path): + new_spec = parse_recipe_config_file(variants_path, namespace) + specs = { + "combined_spec": combined_variant_spec, + "variants.yaml": new_spec, + } + combined_variant_spec = conda_build.variants.combine_specs( + specs + ) + migrated_combined_variant_spec: Any migrated_combined_variant_spec = migrate_combined_spec( combined_variant_spec, @@ -2151,8 +2163,14 @@ def azure_build_id_from_public(forge_config: Dict[str, Any]): build_def = resp.json()["value"][0] forge_config["azure"]["build_id"] = build_def["id"] +def get_maintainer_url(user_or_team): + if "/" in user_or_team: + org, team_name = user_or_team.split("/") + return f"https://github.com/orgs/{org}/teams/{team_name}/" + else: + return f"https://github.com/{user_or_team}/" -def render_README( +def render_readme( jinja_env: SandboxedEnvironment, forge_config: Dict[str, Any], forge_dir: str, diff --git a/tests/test_configure_feedstock.py b/tests/test_configure_feedstock.py index ad1a06020..ea2dddc8e 100644 --- a/tests/test_configure_feedstock.py +++ b/tests/test_configure_feedstock.py @@ -771,7 +771,7 @@ def test_migrator_compiler_version_recipe( def test_files_skip_render( render_skipped_recipe: 
RecipeConfigPair, jinja_env: SandboxedEnvironment ): - configure_feedstock.render_README( + configure_feedstock.render_readme( jinja_env=jinja_env, forge_config=render_skipped_recipe.config, forge_dir=render_skipped_recipe.recipe, diff --git a/tests/test_lint_recipe.py b/tests/test_lint_recipe.py index 2f7f603d5..30be620c8 100644 --- a/tests/test_lint_recipe.py +++ b/tests/test_lint_recipe.py @@ -300,7 +300,7 @@ def test_cbc_osx_hints( macdt: Optional[List[str]], v_std: Optional[List[str]], sdk: Optional[List[str]], - exp_hint: Optional[str], + exp_lint: Optional[str], ): with tmp_directory() as rdir: with open(os.path.join(rdir, "meta.yaml"), "w") as fh: From e4953e8ad28a9cdd5a18edeeae585058ba04b003 Mon Sep 17 00:00:00 2001 From: Ninette Adhikari Date: Tue, 27 Aug 2024 12:15:03 +0200 Subject: [PATCH 10/14] chore: Format file with black --- conda_smithy/configure_feedstock.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/conda_smithy/configure_feedstock.py b/conda_smithy/configure_feedstock.py index 55fe0821b..6df28697b 100644 --- a/conda_smithy/configure_feedstock.py +++ b/conda_smithy/configure_feedstock.py @@ -2163,6 +2163,7 @@ def azure_build_id_from_public(forge_config: Dict[str, Any]): build_def = resp.json()["value"][0] forge_config["azure"]["build_id"] = build_def["id"] + def get_maintainer_url(user_or_team): if "/" in user_or_team: org, team_name = user_or_team.split("/") @@ -2170,6 +2171,7 @@ def get_maintainer_url(user_or_team): else: return f"https://github.com/{user_or_team}/" + def render_readme( jinja_env: SandboxedEnvironment, forge_config: Dict[str, Any], From 7f7e5f06b4fbecd7730c05c4dd1c402bd7e2c338 Mon Sep 17 00:00:00 2001 From: Ninette Adhikari Date: Wed, 28 Aug 2024 16:09:45 +0200 Subject: [PATCH 11/14] fix: Update types to address errors --- conda_smithy/configure_feedstock.py | 4 +++- conda_smithy/lint_recipe.py | 8 ++++---- conda_smithy/linter/conda_recipe_v1_linter.py | 4 ++-- conda_smithy/linter/lints.py | 4 ++-- 
conda_smithy/linter/utils.py | 1 + conda_smithy/schema.py | 6 +++--- pyproject.toml | 4 +++- tests/conftest.py | 8 ++++---- tests/test_lint_recipe.py | 4 ++-- 9 files changed, 24 insertions(+), 19 deletions(-) diff --git a/conda_smithy/configure_feedstock.py b/conda_smithy/configure_feedstock.py index 6df28697b..d7d91ac6e 100644 --- a/conda_smithy/configure_feedstock.py +++ b/conda_smithy/configure_feedstock.py @@ -2472,7 +2472,9 @@ def _legacy_compatibility_checks( def _load_forge_config( - forge_dir: str, exclusive_config_file: str, forge_yml: Optional[str] = None + forge_dir: str, + exclusive_config_file: Optional[str], + forge_yml: Optional[str] = None, ) -> Dict[str, Any]: config = _read_forge_config(forge_dir, forge_yml=forge_yml) diff --git a/conda_smithy/lint_recipe.py b/conda_smithy/lint_recipe.py index c369ec808..e5579737f 100644 --- a/conda_smithy/lint_recipe.py +++ b/conda_smithy/lint_recipe.py @@ -83,7 +83,7 @@ NEEDED_FAMILIES = ["gpl", "bsd", "mit", "apache", "psf"] -def lintify_forge_yaml(recipe_dir: Optional[str] = None) -> (list, list): +def lintify_forge_yaml(recipe_dir: Optional[str] = None) -> tuple: if recipe_dir: forge_yaml_filename = ( glob(os.path.join(recipe_dir, "..", "conda-forge.yml")) @@ -112,8 +112,8 @@ def lintify_meta_yaml( conda_forge: bool = False, recipe_version: int = 0, ) -> Tuple[List[str], List[str]]: - lints = [] - hints = [] + lints: list = [] + hints: list = [] major_sections = list(meta.keys()) # If the recipe_dir exists (no guarantee within this function) , we can @@ -611,7 +611,7 @@ def run_conda_forge_specific( ) -def _format_validation_msg(error: jsonschema.ValidationError): +def _format_validation_msg(error): """Use the data on the validation error to generate improved reporting. 
If available, get the help URL from the first level of the JSON path: diff --git a/conda_smithy/linter/conda_recipe_v1_linter.py b/conda_smithy/linter/conda_recipe_v1_linter.py index 10b50d49d..be07843a5 100644 --- a/conda_smithy/linter/conda_recipe_v1_linter.py +++ b/conda_smithy/linter/conda_recipe_v1_linter.py @@ -44,8 +44,8 @@ def lint_recipe_tests( lints: List[str], hints: List[str], ): - tests_lints = [] - tests_hints = [] + tests_lints: list = [] + tests_hints: list = [] if not test_section: if not outputs_section: diff --git a/conda_smithy/linter/lints.py b/conda_smithy/linter/lints.py index a338b87fe..48040d0a9 100644 --- a/conda_smithy/linter/lints.py +++ b/conda_smithy/linter/lints.py @@ -44,7 +44,7 @@ def lint_section_order( section_order_sorted = sorted(major_sections, key=order.index) if major_sections != section_order_sorted: - section_order_sorted_str = map( + section_order_sorted_str: str | map[str] = map( lambda s: f"'{s}'", section_order_sorted ) section_order_sorted_str = ", ".join(section_order_sorted_str) @@ -83,7 +83,7 @@ def lint_recipe_maintainers(extra_section, lints): def lint_recipe_have_tests( - recipe_dir: str, + recipe_dir: Optional[str], test_section: List[Dict[str, Any]], outputs_section: List[Dict[str, Any]], lints: List[str], diff --git a/conda_smithy/linter/utils.py b/conda_smithy/linter/utils.py index 31c8545bf..8010e7ef5 100644 --- a/conda_smithy/linter/utils.py +++ b/conda_smithy/linter/utils.py @@ -206,6 +206,7 @@ def _lint_package_version(version: Optional[str]) -> Optional[str]: VersionOrder(ver) except InvalidVersionSpec as e: return invalid_version.format(ver=ver, err=e) + return None @lru_cache(maxsize=1) diff --git a/conda_smithy/schema.py b/conda_smithy/schema.py index 66b3b6236..8de71e9e3 100644 --- a/conda_smithy/schema.py +++ b/conda_smithy/schema.py @@ -11,9 +11,9 @@ from pydantic import BaseModel, ConfigDict, Field, create_model try: - from enum import StrEnum # type: ignore + from enum import StrEnum except 
ImportError: - from backports.strenum import StrEnum + from backports.strenum import StrEnum # type: ignore from conda_smithy.validate_schema import ( @@ -493,7 +493,7 @@ def get_subdirs() -> List[str]: ] -Platforms = StrEnum("Platforms", get_subdirs()) +Platforms = StrEnum("Platforms", get_subdirs()) # type: ignore class ChannelPriorityConfig(StrEnum): diff --git a/pyproject.toml b/pyproject.toml index 02184244c..5acd20703 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -83,6 +83,8 @@ module = [ 'license_expression', 'cirun', 'scrypt', - 'toolz' + 'toolz', + 'rattler_build_conda_compat.*', + 'tlz' ] ignore_missing_imports = true diff --git a/tests/conftest.py b/tests/conftest.py index e35cf7a53..47bd58e0c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -22,7 +22,7 @@ class ConfigYAML(typing.NamedTuple): - workdir: Path + workdir: str recipe_name: str type: str @@ -61,7 +61,7 @@ def recipe_dirname(): @pytest.fixture(scope="function", params=["conda-build", "rattler-build"]) -def config_yaml(testing_workdir: str, recipe_dirname: str, request) -> str: +def config_yaml(testing_workdir: str, recipe_dirname: str, request): config: dict = {"python": ["2.7", "3.5"], "r_base": ["3.3.2", "3.4.2"]} os.makedirs(os.path.join(testing_workdir, recipe_dirname)) with open(os.path.join(testing_workdir, "config.yaml"), "w") as f: @@ -650,7 +650,7 @@ def jinja_env(): @pytest.fixture(scope="function") -def v1_noarch_recipe_with_context(testing_workdir: Path, recipe_dirname): +def v1_noarch_recipe_with_context(testing_workdir: str, recipe_dirname): with open(os.path.join(testing_workdir, "conda-forge.yml"), "w") as f: config = { "recipe_dir": recipe_dirname, @@ -688,7 +688,7 @@ def v1_noarch_recipe_with_context(testing_workdir: Path, recipe_dirname): @pytest.fixture(scope="function") -def v1_recipe_with_multiple_outputs(testing_workdir: Path, recipe_dirname): +def v1_recipe_with_multiple_outputs(testing_workdir: str, recipe_dirname): with 
open(os.path.join(testing_workdir, "conda-forge.yml"), "w") as f: config = { "recipe_dir": recipe_dirname, diff --git a/tests/test_lint_recipe.py b/tests/test_lint_recipe.py index 30be620c8..53b3e73f1 100644 --- a/tests/test_lint_recipe.py +++ b/tests/test_lint_recipe.py @@ -31,7 +31,7 @@ def is_gh_token_set() -> bool: @contextmanager -def get_recipe_in_dir(recipe_name: str) -> Path: +def get_recipe_in_dir(recipe_name: str): base_dir = Path(__file__).parent recipe_path = base_dir / "recipes" / recipe_name assert recipe_path.exists(), f"Recipe {recipe_name} does not exist" @@ -2677,7 +2677,7 @@ def test_lint_wheels(tmp_path, yaml_block, annotation): @pytest.mark.parametrize("recipe_version", [0, 1]) def test_pin_compatible_in_run_exports(recipe_version: int): - meta = { + meta: dict = { "package": { "name": "apackage", } From 1babdf180d2e808a9f8d1fc4a4b12c9b2ff1a0ea Mon Sep 17 00:00:00 2001 From: Ninette Adhikari Date: Wed, 28 Aug 2024 16:17:52 +0200 Subject: [PATCH 12/14] fix: Add type fixes --- conda_smithy/linter/lints.py | 4 ++-- conda_smithy/schema.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/conda_smithy/linter/lints.py b/conda_smithy/linter/lints.py index 48040d0a9..77c3be87a 100644 --- a/conda_smithy/linter/lints.py +++ b/conda_smithy/linter/lints.py @@ -2,7 +2,7 @@ import os import re from collections.abc import Sequence -from typing import Any, Dict, List, Literal, Optional +from typing import Any, Dict, List, Literal, Optional, Union from conda.exceptions import InvalidVersionSpec from conda.models.version import VersionOrder @@ -44,7 +44,7 @@ def lint_section_order( section_order_sorted = sorted(major_sections, key=order.index) if major_sections != section_order_sorted: - section_order_sorted_str: str | map[str] = map( + section_order_sorted_str: Union[str, map[str]] = map( lambda s: f"'{s}'", section_order_sorted ) section_order_sorted_str = ", ".join(section_order_sorted_str) diff --git a/conda_smithy/schema.py 
b/conda_smithy/schema.py index 8de71e9e3..edaaf336a 100644 --- a/conda_smithy/schema.py +++ b/conda_smithy/schema.py @@ -11,7 +11,7 @@ from pydantic import BaseModel, ConfigDict, Field, create_model try: - from enum import StrEnum + from enum import StrEnum # type: ignore except ImportError: from backports.strenum import StrEnum # type: ignore From 9e26d3b8e4a8a4c6eec79f40afcdd4522e4e3e1b Mon Sep 17 00:00:00 2001 From: Ninette Adhikari Date: Wed, 28 Aug 2024 16:36:09 +0200 Subject: [PATCH 13/14] chore: Formatting with ruff --- conda_smithy/anaconda_token_rotation.py | 3 +-- conda_smithy/ci_skeleton.py | 5 +++-- conda_smithy/cli.py | 3 +-- conda_smithy/configure_feedstock.py | 18 +++++++----------- conda_smithy/feedstock_io.py | 5 ++--- conda_smithy/feedstock_tokens.py | 8 ++++---- conda_smithy/lint_recipe.py | 5 ----- conda_smithy/schema.py | 4 ++-- conda_smithy/utils.py | 6 +++--- conda_smithy/validate_schema.py | 4 ++-- tests/conftest.py | 4 ++-- tests/test_anaconda_token_rotation.py | 4 ++-- tests/test_ci_skeleton.py | 1 - tests/test_cli.py | 4 ++-- tests/test_condaforge_config_schema.py | 4 ---- tests/test_configure_feedstock.py | 11 +---------- tests/test_feedstock_io.py | 6 +++--- tests/test_feedstock_tokens.py | 4 ++-- tests/test_lint_recipe.py | 6 +++--- 19 files changed, 40 insertions(+), 65 deletions(-) diff --git a/conda_smithy/anaconda_token_rotation.py b/conda_smithy/anaconda_token_rotation.py index 45dac4b53..a3439126b 100644 --- a/conda_smithy/anaconda_token_rotation.py +++ b/conda_smithy/anaconda_token_rotation.py @@ -12,8 +12,7 @@ import os import sys from contextlib import redirect_stderr, redirect_stdout -from github import Github -from typing import List, Optional, Union +from typing import Optional, Union import requests from github import Github diff --git a/conda_smithy/ci_skeleton.py b/conda_smithy/ci_skeleton.py index 2f4df96f7..a49c8d1fe 100644 --- a/conda_smithy/ci_skeleton.py +++ b/conda_smithy/ci_skeleton.py @@ -8,10 +8,11 @@ import os 
import sys +from typing import Dict -from .configure_feedstock import make_jinja_env from jinja2.sandbox import SandboxedEnvironment -from typing import Dict + +from conda_smithy.configure_feedstock import make_jinja_env def _render_template( diff --git a/conda_smithy/cli.py b/conda_smithy/cli.py index cc156d7c3..18a7dad20 100644 --- a/conda_smithy/cli.py +++ b/conda_smithy/cli.py @@ -6,14 +6,13 @@ import tempfile import time from textwrap import dedent -from typing import Optional, Union +from typing import List, Optional, Union import conda # noqa from conda_build.metadata import MetaData from rattler_build_conda_compat.render import MetaData as RattlerMetaData from rattler_build_conda_compat.utils import has_recipe as has_recipe_v1 from ruamel.yaml import YAML -from typing import Optional, List import conda_smithy.cirun_utils from conda_smithy import __version__, configure_feedstock, feedstock_io diff --git a/conda_smithy/configure_feedstock.py b/conda_smithy/configure_feedstock.py index d7d91ac6e..15082fde6 100644 --- a/conda_smithy/configure_feedstock.py +++ b/conda_smithy/configure_feedstock.py @@ -16,14 +16,6 @@ from itertools import chain, product from os import fspath from pathlib import Path, PurePath - -import requests -import yaml - -# Imports for typing -from conda_build.config import Config -from conda_build.metadata import MetaData -from jinja2.sandbox import SandboxedEnvironment from typing import ( Any, Callable, @@ -36,6 +28,13 @@ Union, ) +import requests +import yaml + +# Imports for typing +from conda_build.config import Config +from conda_build.metadata import MetaData +from jinja2.sandbox import SandboxedEnvironment # The `requests` lib uses `simplejson` instead of `json` when available. 
# In consequence the same JSON library must be used or the `JSONDecodeError` @@ -56,13 +55,10 @@ from conda_build import __version__ as conda_build_version from conda_build.metadata import get_selectors from jinja2 import FileSystemLoader -from jinja2.sandbox import SandboxedEnvironment from rattler_build_conda_compat.loader import parse_recipe_config_file from rattler_build_conda_compat.render import render as rattler_render - from conda_smithy import __version__ - from conda_smithy.feedstock_io import ( copy_file, remove_file, diff --git a/conda_smithy/feedstock_io.py b/conda_smithy/feedstock_io.py index 95d77f16f..4c081cad7 100644 --- a/conda_smithy/feedstock_io.py +++ b/conda_smithy/feedstock_io.py @@ -1,10 +1,9 @@ -from contextlib import contextmanager -import io import os import shutil import stat +from contextlib import contextmanager from io import TextIOWrapper -from typing import Iterator, Optional, Any, Union +from typing import Iterator, Optional, Union def get_repo(path: str, search_parent_directories: bool = True): diff --git a/conda_smithy/feedstock_tokens.py b/conda_smithy/feedstock_tokens.py index bb01a0f5e..35b1c50ba 100644 --- a/conda_smithy/feedstock_tokens.py +++ b/conda_smithy/feedstock_tokens.py @@ -30,10 +30,6 @@ import tempfile import time from contextlib import contextmanager, redirect_stderr, redirect_stdout - -import git -import requests -import scrypt from typing import ( Iterator, Optional, @@ -41,6 +37,10 @@ Union, ) +import git +import requests +import scrypt + class FeedstockTokenError(Exception): """Custom exception for sanitized token errors.""" diff --git a/conda_smithy/lint_recipe.py b/conda_smithy/lint_recipe.py index e5579737f..b6602ec2f 100644 --- a/conda_smithy/lint_recipe.py +++ b/conda_smithy/lint_recipe.py @@ -7,20 +7,15 @@ from inspect import cleandoc from pathlib import Path from textwrap import indent -from io import TextIOWrapper from typing import ( Any, - Dict, - Iterator, List, Optional, - Set, Tuple, Union, ) 
import github -import jsonschema import requests from conda_build.metadata import ( ensure_valid_license_family, diff --git a/conda_smithy/schema.py b/conda_smithy/schema.py index edaaf336a..165b6d353 100644 --- a/conda_smithy/schema.py +++ b/conda_smithy/schema.py @@ -508,13 +508,13 @@ class DefaultTestPlatforms(StrEnum): native_and_emulated = "native_and_emulated" -buildPlatform_fields: Dict[str, Any] = { +build_platform_fields: Dict[str, Any] = { platform.value: (Optional[Platforms], Field(default=platform.value)) for platform in Platforms } BuildPlatform = create_model( "build_platform", - **buildPlatform_fields, + **build_platform_fields, ) OSVersion_fields: Dict[str, Any] = { diff --git a/conda_smithy/utils.py b/conda_smithy/utils.py index 6648ebe05..86f399257 100644 --- a/conda_smithy/utils.py +++ b/conda_smithy/utils.py @@ -8,16 +8,16 @@ from contextlib import contextmanager from pathlib import Path from typing import Any, Dict, Union -from conda_build.metadata import MetaData import jinja2 import jinja2.sandbox import ruamel.yaml -from ruamel.yaml.comments import CommentedMap -from ruamel.yaml.main import YAML from conda_build.api import render as conda_build_render + +# from conda_build.metadata import MetaData from conda_build.render import MetaData from rattler_build_conda_compat.render import MetaData as RattlerBuildMetaData +from ruamel.yaml.comments import CommentedMap RATTLER_BUILD = "rattler-build" CONDA_BUILD = "conda-build" diff --git a/conda_smithy/validate_schema.py b/conda_smithy/validate_schema.py index 0ca19e841..8bc109120 100644 --- a/conda_smithy/validate_schema.py +++ b/conda_smithy/validate_schema.py @@ -1,12 +1,12 @@ import json from pathlib import Path from typing import ( - Tuple, - List, Dict, + List, Optional, Union, ) + from jsonschema import Draft202012Validator, validators from jsonschema.exceptions import ValidationError from ruamel.yaml.comments import CommentedMap diff --git a/tests/conftest.py b/tests/conftest.py index 
47bd58e0c..215b5901e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,6 +5,8 @@ import pytest import yaml +from _pytest._py.path import LocalPath +from _pytest.fixtures import SubRequest from conda_build.utils import copy_into from jinja2 import FileSystemLoader from jinja2.sandbox import SandboxedEnvironment @@ -13,8 +15,6 @@ _load_forge_config, conda_forge_content, ) -from _pytest._py.path import LocalPath -from _pytest.fixtures import SubRequest RecipeConfigPair = collections.namedtuple( "RecipeConfigPair", ("recipe", "config") diff --git a/tests/test_anaconda_token_rotation.py b/tests/test_anaconda_token_rotation.py index ddc44ac56..01a29e62c 100644 --- a/tests/test_anaconda_token_rotation.py +++ b/tests/test_anaconda_token_rotation.py @@ -1,11 +1,11 @@ from unittest import mock +from unittest.mock import MagicMock import pytest +from _pytest.monkeypatch import MonkeyPatch from conda_smithy.anaconda_token_rotation import rotate_anaconda_token from conda_smithy.ci_register import drone_default_endpoint -from unittest.mock import MagicMock -from _pytest.monkeypatch import MonkeyPatch @pytest.mark.parametrize("appveyor", [True, False]) diff --git a/tests/test_ci_skeleton.py b/tests/test_ci_skeleton.py index f97e1851f..2d05eec0f 100644 --- a/tests/test_ci_skeleton.py +++ b/tests/test_ci_skeleton.py @@ -1,4 +1,3 @@ -import pytest from _pytest._py.path import LocalPath from conda_smithy.ci_skeleton import generate diff --git a/tests/test_cli.py b/tests/test_cli.py index 8b4b8257a..6892815a2 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -4,13 +4,13 @@ import shutil import subprocess from textwrap import dedent +from typing import Union import pytest import yaml -from typing import Union +from conftest import RecipeConfigPair from conda_smithy import cli -from conftest import RecipeConfigPair _thisdir = os.path.abspath(os.path.dirname(__file__)) diff --git a/tests/test_condaforge_config_schema.py b/tests/test_condaforge_config_schema.py index 
c21ec40e8..b4e08b586 100644 --- a/tests/test_condaforge_config_schema.py +++ b/tests/test_condaforge_config_schema.py @@ -2,10 +2,6 @@ from pydantic import ValidationError from conda_smithy.schema import ConfigModel -from typing import ( - Dict, - Union, -) # Sample config files SAMPLE_CONFIGS = [ diff --git a/tests/test_configure_feedstock.py b/tests/test_configure_feedstock.py index ea2dddc8e..913643f31 100644 --- a/tests/test_configure_feedstock.py +++ b/tests/test_configure_feedstock.py @@ -9,18 +9,9 @@ import pytest import yaml -from conftest import ConfigYAML -from conftest import RecipeConfigPair from _pytest.logging import LogCaptureFixture +from conftest import ConfigYAML, RecipeConfigPair from jinja2.sandbox import SandboxedEnvironment -from typing import ( - Any, - Dict, - List, - Set, - Tuple, - Union, -) from conda_smithy import configure_feedstock from conda_smithy.configure_feedstock import _read_forge_config diff --git a/tests/test_feedstock_io.py b/tests/test_feedstock_io.py index c6da85fdd..49177898a 100644 --- a/tests/test_feedstock_io.py +++ b/tests/test_feedstock_io.py @@ -7,13 +7,13 @@ import string import tempfile import unittest - -import git -from git.index.typ import BlobFilter from typing import ( Iterator, ) +import git +from git.index.typ import BlobFilter + import conda_smithy.feedstock_io as fio diff --git a/tests/test_feedstock_tokens.py b/tests/test_feedstock_tokens.py index 714791491..aae3c54cc 100644 --- a/tests/test_feedstock_tokens.py +++ b/tests/test_feedstock_tokens.py @@ -1,13 +1,13 @@ import json import os import time +from typing import Any, Optional from unittest import mock +from unittest.mock import MagicMock import pytest import scrypt from _pytest._py.path import LocalPath -from typing import Optional, Any -from unittest.mock import MagicMock from conda_smithy.ci_register import drone_default_endpoint from conda_smithy.feedstock_tokens import ( diff --git a/tests/test_lint_recipe.py b/tests/test_lint_recipe.py index 
53b3e73f1..24bfa2fbc 100644 --- a/tests/test_lint_recipe.py +++ b/tests/test_lint_recipe.py @@ -9,9 +9,6 @@ from collections import OrderedDict from contextlib import contextmanager from pathlib import Path - -import github -import pytest from typing import ( Iterator, List, @@ -19,6 +16,9 @@ Tuple, ) +import github +import pytest + import conda_smithy.lint_recipe as linter from conda_smithy.linter.utils import VALID_PYTHON_BUILD_BACKENDS from conda_smithy.utils import get_yaml From bafe4083ba7130fad7a227a633cd1b024025d67a Mon Sep 17 00:00:00 2001 From: Ninette Adhikari Date: Wed, 28 Aug 2024 16:41:28 +0200 Subject: [PATCH 14/14] ci: Make type check optional --- .github/workflows/tests.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index bf4bcfbf2..70869e370 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -69,6 +69,7 @@ jobs: - name: run type check run: | mypy conda_smithy tests + continue-on-error: true # type check is optional - name: coveralls uses: coverallsapp/github-action@master