diff --git a/.github/actions/pre-test/action.yml b/.github/actions/pre-test/action.yml index b70c942f..9d1eb2de 100644 --- a/.github/actions/pre-test/action.yml +++ b/.github/actions/pre-test/action.yml @@ -6,4 +6,4 @@ runs: using: composite steps: - uses: asottile/workflows/.github/actions/latest-git@v1.4.0 - if: inputs.env == 'py39' && runner.os == 'Linux' + if: inputs.env == 'py38' && runner.os == 'Linux' diff --git a/.github/workflows/languages.yaml b/.github/workflows/languages.yaml index be8963ba..8bc8e712 100644 --- a/.github/workflows/languages.yaml +++ b/.github/workflows/languages.yaml @@ -3,7 +3,7 @@ name: languages on: push: branches: [main, test-me-*] - tags: '*' + tags: pull_request: concurrency: @@ -21,7 +21,7 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@v4 with: - python-version: '3.10' + python-version: 3.8 - name: install deps run: python -mpip install -e . -r requirements-dev.txt - name: vars @@ -36,10 +36,10 @@ jobs: matrix: include: ${{ fromJSON(needs.vars.outputs.languages) }} steps: - - uses: asottile/workflows/.github/actions/fast-checkout@v1.8.1 + - uses: asottile/workflows/.github/actions/fast-checkout@v1.4.0 - uses: actions/setup-python@v4 with: - python-version: '3.10' + python-version: 3.8 - run: echo "$CONDA\Scripts" >> "$GITHUB_PATH" shell: bash @@ -63,10 +63,8 @@ jobs: echo 'C:\Strawberry\c\bin' >> "$GITHUB_PATH" shell: bash if: matrix.os == 'windows-latest' && matrix.language == 'perl' - - uses: haskell/actions/setup@v2 - if: matrix.language == 'haskell' - - uses: r-lib/actions/setup-r@v2 - if: matrix.os == 'ubuntu-latest' && matrix.language == 'r' + - run: testing/get-swift.sh + if: matrix.os == 'ubuntu-latest' && matrix.language == 'swift' - name: install deps run: python -mpip install -e . -r requirements-dev.txt diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 02b11ae2..f281dcf2 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -3,7 +3,7 @@ name: main on: push: branches: [main, test-me-*] - tags: '*' + tags: pull_request: concurrency: @@ -12,12 +12,12 @@ concurrency: jobs: main-windows: - uses: asottile/workflows/.github/workflows/tox.yml@v1.8.1 + uses: asottile/workflows/.github/workflows/tox.yml@v1.4.0 with: - env: '["py310"]' + env: '["py38"]' os: windows-latest main-linux: - uses: asottile/workflows/.github/workflows/tox.yml@v1.8.1 + uses: asottile/workflows/.github/workflows/tox.yml@v1.4.0 with: - env: '["py310", "py311", "py312", "py313"]' + env: '["py38", "py39", "py310"]' os: ubuntu-latest diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3654066f..cc96a703 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v6.0.0 + rev: v4.4.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer @@ -10,35 +10,36 @@ repos: - id: name-tests-test - id: requirements-txt-fixer - repo: https://github.com/asottile/setup-cfg-fmt - rev: v3.2.0 + rev: v2.2.0 hooks: - id: setup-cfg-fmt -- repo: https://github.com/asottile/reorder-python-imports - rev: v3.16.0 +- repo: https://github.com/asottile/reorder_python_imports + rev: v3.9.0 hooks: - id: reorder-python-imports - exclude: ^pre_commit/resources/ - args: [--py310-plus, --add-import, 'from __future__ import annotations'] + exclude: ^(pre_commit/resources/|testing/resources/python3_hooks_repo/) + args: [--py38-plus, --add-import, 'from __future__ import annotations'] - repo: https://github.com/asottile/add-trailing-comma - rev: v4.0.0 + 
rev: v2.4.0 hooks: - id: add-trailing-comma + args: [--py36-plus] - repo: https://github.com/asottile/pyupgrade - rev: v3.21.2 + rev: v3.3.1 hooks: - id: pyupgrade - args: [--py310-plus] -- repo: https://github.com/hhatto/autopep8 - rev: v2.3.2 + args: [--py38-plus] +- repo: https://github.com/pre-commit/mirrors-autopep8 + rev: v2.0.2 hooks: - id: autopep8 - repo: https://github.com/PyCQA/flake8 - rev: 7.3.0 + rev: 6.0.0 hooks: - id: flake8 - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.19.1 + rev: v1.1.1 hooks: - id: mypy - additional_dependencies: [types-pyyaml] + additional_dependencies: [types-all] exclude: ^testing/resources/ diff --git a/CHANGELOG.md b/CHANGELOG.md index 879ae073..efd96c79 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,258 +1,3 @@ -4.5.1 - 2025-12-16 -================== - -### Fixes -- Fix `language: python` with `repo: local` without `additional_dependencies`. - - #3597 PR by @asottile. - -4.5.0 - 2025-11-22 -================== - -### Features -- Add `pre-commit hazmat`. - - #3585 PR by @asottile. - -4.4.0 - 2025-11-08 -================== - -### Features -- Add `--fail-fast` option to `pre-commit run`. - - #3528 PR by @JulianMaurin. -- Upgrade `ruby-build` / `rbenv`. - - #3566 PR by @asottile. - - #3565 issue by @MRigal. -- Add `language: unsupported` / `language: unsupported_script` as aliases - for `language: system` / `language: script` (which will eventually be - deprecated). - - #3577 PR by @asottile. -- Add support docker-in-docker detection for cgroups v2. - - #3535 PR by @br-rhrbacek. - - #3360 issue by @JasonAlt. - -### Fixes -- Handle when docker gives `SecurityOptions: null`. - - #3537 PR by @asottile. - - #3514 issue by @jenstroeger. -- Fix error context for invalid `stages` in `.pre-commit-config.yaml`. - - #3576 PR by @asottile. - -4.3.0 - 2025-08-09 -================== - -### Features -- `language: docker` / `language: docker_image`: detect rootless docker. - - #3446 PR by @matthewhughes934. - - #1243 issue by @dkolepp. -- `language: julia`: avoid `startup.jl` when executing hooks. - - #3496 PR by @ericphanson. -- `language: dart`: support latest dart versions which require a higher sdk - lower bound. - - #3507 PR by @bc-lee. - -4.2.0 - 2025-03-18 -================== - -### Features -- For `language: python` first attempt a versioned python executable for - the default language version before consulting a potentially unversioned - `sys.executable`. - - #3430 PR by @asottile. - -### Fixes -- Handle error during conflict detection when a file is named "HEAD" - - #3425 PR by @tusharsadhwani. - -4.1.0 - 2025-01-20 -================== - -### Features -- Add `language: julia`. - - #3348 PR by @fredrikekre. - - #2689 issue @jmuchovej. - -### Fixes -- Disable automatic toolchain switching for `language: golang`. - - #3304 PR by @AleksaC. - - #3300 issue by @AleksaC. - - #3149 issue by @nijel. -- Fix `language: r` installation when initiated by RStudio. - - #3389 PR by @lorenzwalthert. - - #3385 issue by @lorenzwalthert. - - -4.0.1 - 2024-10-08 -================== - -### Fixes -- Fix `pre-commit migrate-config` for unquoted deprecated stages names with - purelib `pyyaml`. - - #3324 PR by @asottile. - - pre-commit-ci/issues#234 issue by @lorenzwalthert. - -4.0.0 - 2024-10-05 -================== - -### Features -- Improve `pre-commit migrate-config` to handle more yaml formats. - - #3301 PR by @asottile. -- Handle `stages` deprecation in `pre-commit migrate-config`. - - #3302 PR by @asottile. - - #2732 issue by @asottile. 
-- Upgrade `ruby-build`. - - #3199 PR by @ThisGuyCodes. -- Add "sensible regex" warnings to `repo: meta`. - - #3311 PR by @asottile. -- Add warnings for deprecated `stages` (`commit` -> `pre-commit`, `push` -> - `pre-push`, `merge-commit` -> `pre-merge-commit`). - - #3312 PR by @asottile. - - #3313 PR by @asottile. - - #3315 PR by @asottile. - - #2732 issue by @asottile. - -### Updating -- `language: python_venv` has been removed -- use `language: python` instead. - - #3320 PR by @asottile. - - #2734 issue by @asottile. - -3.8.0 - 2024-07-28 -================== - -### Features -- Implement health checks for `language: r` so environments are recreated if - the system version of R changes. - - #3206 issue by @lorenzwalthert. - - #3265 PR by @lorenzwalthert. - -3.7.1 - 2024-05-10 -================== - -### Fixes -- Fix `language: rust` default language version check when `rust-toolchain.toml` - is present. - - issue by @gaborbernat. - - #3201 PR by @asottile. - -3.7.0 - 2024-03-24 -================== - -### Features -- Use a tty for `docker` and `docker_image` hooks when `--color` is specified. - - #3122 PR by @glehmann. - -### Fixes -- Fix `fail_fast` for individual hooks stopping when previous hooks had failed. - - #3167 issue by @tp832944. - - #3168 PR by @asottile. - -### Updating -- The per-hook behaviour of `fail_fast` was fixed. If you want the pre-3.7.0 - behaviour, add `fail_fast: true` to all hooks before the last `fail_fast` - hook. - -3.6.2 - 2024-02-18 -================== - -### Fixes -- Fix building golang hooks during `git commit --all`. - - #3130 PR by @asottile. - - #2722 issue by @pestanko and @matthewhughes934. - -3.6.1 - 2024-02-10 -================== - -### Fixes -- Remove `PYTHONEXECUTABLE` from environment when running. - - #3110 PR by @untitaker. -- Handle staged-files-only with only a crlf diff. - - #3126 PR by @asottile. - - issue by @tyyrok. - -3.6.0 - 2023-12-09 -================== - -### Features -- Check `minimum_pre_commit_version` first when parsing configs. - - #3092 PR by @asottile. - -### Fixes -- Fix deprecation warnings for `importlib.resources`. - - #3043 PR by @asottile. -- Fix deprecation warnings for rmtree. - - #3079 PR by @edgarrmondragon. - -### Updating -- Drop support for python<3.9. - - #3042 PR by @asottile. - - #3093 PR by @asottile. - -3.5.0 - 2023-10-13 -================== - -### Features -- Improve performance of `check-hooks-apply` and `check-useless-excludes`. - - #2998 PR by @mxr. - - #2935 issue by @mxr. - -### Fixes -- Use `time.monotonic()` for more accurate hook timing. - - #3024 PR by @adamchainz. - -### Updating -- Require npm 6.x+ for `language: node` hooks. - - #2996 PR by @RoelAdriaans. - - #1983 issue by @henryiii. - -3.4.0 - 2023-09-02 -================== - -### Features -- Add `language: haskell`. - - #2932 by @alunduil. -- Improve cpu count detection when run under cgroups. - - #2979 PR by @jdb8. - - #2978 issue by @jdb8. - -### Fixes -- Handle negative exit codes from hooks receiving posix signals. - - #2971 PR by @chriskuehl. - - #2970 issue by @chriskuehl. - -3.3.3 - 2023-06-13 -================== - -### Fixes -- Work around OS packagers setting `--install-dir` / `--bin-dir` in gem settings. - - #2905 PR by @jaysoffian. - - #2799 issue by @lmilbaum. - -3.3.2 - 2023-05-17 -================== - -### Fixes -- Work around `r` on windows sometimes double-un-quoting arguments. - - #2885 PR by @lorenzwalthert. - - #2870 issue by @lorenzwalthert. 
- -3.3.1 - 2023-05-02 -================== - -### Fixes -- Work around `git` partial clone bug for `autoupdate` on windows. - - #2866 PR by @asottile. - - #2865 issue by @adehad. - -3.3.0 - 2023-05-01 -================== - -### Features -- Upgrade ruby-build. - - #2846 PR by @jalessio. -- Use blobless clone for faster autoupdate. - - #2859 PR by @asottile. -- Add `-j` / `--jobs` argument to `autoupdate` for parallel execution. - - #2863 PR by @asottile. - - issue by @gaborbernat. - 3.2.2 - 2023-04-03 ================== diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index da7f9432..ab3a9298 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -92,7 +92,7 @@ language, for example: here are the apis that should be implemented for a language -Note that these are also documented in [`pre_commit/lang_base.py`](https://github.com/pre-commit/pre-commit/blob/main/pre_commit/lang_base.py) +Note that these are also documented in [`pre_commit/languages/all.py`](https://github.com/pre-commit/pre-commit/blob/main/pre_commit/languages/all.py) #### `ENVIRONMENT_DIR` @@ -111,7 +111,7 @@ one cannot be determined, return `'default'`. You generally don't need to implement this on a first pass and can just use: ```python -get_default_version = lang_base.basic_default_version +get_default_version = helpers.basic_default_version ``` `python` is currently the only language which implements this api @@ -125,7 +125,7 @@ healthy. You generally don't need to implement this on a first pass and can just use: ```python -health_check = lang_base.basic_health_check +health_check = helpers.basic_healthy_check ``` `python` is currently the only language which implements this api, for python @@ -137,7 +137,7 @@ this is the trickiest one to implement and where all the smart parts happen. 
this api should do the following things -- (0th / 3rd class): `install_environment = lang_base.no_install` +- (0th / 3rd class): `install_environment = helpers.no_install` - (1st class): install a language runtime into the hook's directory - (2nd class): install the package at `.` into the `ENVIRONMENT_DIR` - (2nd class, optional): install packages listed in `additional_dependencies` diff --git a/pre_commit/all_languages.py b/pre_commit/all_languages.py index 166bc167..2bed7067 100644 --- a/pre_commit/all_languages.py +++ b/pre_commit/all_languages.py @@ -9,8 +9,6 @@ from pre_commit.languages import docker_image from pre_commit.languages import dotnet from pre_commit.languages import fail from pre_commit.languages import golang -from pre_commit.languages import haskell -from pre_commit.languages import julia from pre_commit.languages import lua from pre_commit.languages import node from pre_commit.languages import perl @@ -19,9 +17,9 @@ from pre_commit.languages import python from pre_commit.languages import r from pre_commit.languages import ruby from pre_commit.languages import rust +from pre_commit.languages import script from pre_commit.languages import swift -from pre_commit.languages import unsupported -from pre_commit.languages import unsupported_script +from pre_commit.languages import system languages: dict[str, Language] = { @@ -33,8 +31,6 @@ languages: dict[str, Language] = { 'dotnet': dotnet, 'fail': fail, 'golang': golang, - 'haskell': haskell, - 'julia': julia, 'lua': lua, 'node': node, 'perl': perl, @@ -43,8 +39,10 @@ languages: dict[str, Language] = { 'r': r, 'ruby': ruby, 'rust': rust, + 'script': script, 'swift': swift, - 'unsupported': unsupported, - 'unsupported_script': unsupported_script, + 'system': system, + # TODO: fully deprecate `python_venv` + 'python_venv': python, } language_names = sorted(languages) diff --git a/pre_commit/clientlib.py b/pre_commit/clientlib.py index 51f14d26..d0651cae 100644 --- a/pre_commit/clientlib.py +++ b/pre_commit/clientlib.py @@ -2,14 +2,12 @@ from __future__ import annotations import functools import logging -import os.path import re import shlex import sys -from collections.abc import Callable -from collections.abc import Sequence from typing import Any from typing import NamedTuple +from typing import Sequence import cfgv from identify.identify import ALL_TAGS @@ -72,43 +70,6 @@ def transform_stage(stage: str) -> str: return _STAGES.get(stage, stage) -MINIMAL_MANIFEST_SCHEMA = cfgv.Array( - cfgv.Map( - 'Hook', 'id', - cfgv.Required('id', cfgv.check_string), - cfgv.Optional('stages', cfgv.check_array(cfgv.check_string), []), - ), -) - - -def warn_for_stages_on_repo_init(repo: str, directory: str) -> None: - try: - manifest = cfgv.load_from_filename( - os.path.join(directory, C.MANIFEST_FILE), - schema=MINIMAL_MANIFEST_SCHEMA, - load_strategy=yaml_load, - exc_tp=InvalidManifestError, - ) - except InvalidManifestError: - return # they'll get a better error message when it actually loads! - - legacy_stages = {} # sorted set - for hook in manifest: - for stage in hook.get('stages', ()): - if stage in _STAGES: - legacy_stages[stage] = True - - if legacy_stages: - logger.warning( - f'repo `{repo}` uses deprecated stage names ' - f'({", ".join(legacy_stages)}) which will be removed in a ' - f'future version. ' - f'Hint: often `pre-commit autoupdate --repo {shlex.quote(repo)}` ' - f'will fix this. 
' - f'if it does not -- consider reporting an issue to that repo.', - ) - - class StagesMigrationNoDefault(NamedTuple): key: str default: Sequence[str] @@ -117,12 +78,11 @@ class StagesMigrationNoDefault(NamedTuple): if self.key not in dct: return - with cfgv.validate_context(f'At key: {self.key}'): - val = dct[self.key] - cfgv.check_array(cfgv.check_any)(val) + val = dct[self.key] + cfgv.check_array(cfgv.check_any)(val) - val = [transform_stage(v) for v in val] - cfgv.check_array(cfgv.check_one_of(STAGES))(val) + val = [transform_stage(v) for v in val] + cfgv.check_array(cfgv.check_one_of(STAGES))(val) def apply_default(self, dct: dict[str, Any]) -> None: if self.key not in dct: @@ -139,108 +99,13 @@ class StagesMigration(StagesMigrationNoDefault): super().apply_default(dct) -class DeprecatedStagesWarning(NamedTuple): - key: str - - def check(self, dct: dict[str, Any]) -> None: - if self.key not in dct: - return - - val = dct[self.key] - cfgv.check_array(cfgv.check_any)(val) - - legacy_stages = [stage for stage in val if stage in _STAGES] - if legacy_stages: - logger.warning( - f'hook id `{dct["id"]}` uses deprecated stage names ' - f'({", ".join(legacy_stages)}) which will be removed in a ' - f'future version. ' - f'run: `pre-commit migrate-config` to automatically fix this.', - ) - - def apply_default(self, dct: dict[str, Any]) -> None: - pass - - def remove_default(self, dct: dict[str, Any]) -> None: - raise NotImplementedError - - -class DeprecatedDefaultStagesWarning(NamedTuple): - key: str - - def check(self, dct: dict[str, Any]) -> None: - if self.key not in dct: - return - - val = dct[self.key] - cfgv.check_array(cfgv.check_any)(val) - - legacy_stages = [stage for stage in val if stage in _STAGES] - if legacy_stages: - logger.warning( - f'top-level `default_stages` uses deprecated stage names ' - f'({", ".join(legacy_stages)}) which will be removed in a ' - f'future version. 
' - f'run: `pre-commit migrate-config` to automatically fix this.', - ) - - def apply_default(self, dct: dict[str, Any]) -> None: - pass - - def remove_default(self, dct: dict[str, Any]) -> None: - raise NotImplementedError - - -def _translate_language(name: str) -> str: - return { - 'system': 'unsupported', - 'script': 'unsupported_script', - }.get(name, name) - - -class LanguageMigration(NamedTuple): # remove - key: str - check_fn: Callable[[object], None] - - def check(self, dct: dict[str, Any]) -> None: - if self.key not in dct: - return - - with cfgv.validate_context(f'At key: {self.key}'): - self.check_fn(_translate_language(dct[self.key])) - - def apply_default(self, dct: dict[str, Any]) -> None: - if self.key not in dct: - return - - dct[self.key] = _translate_language(dct[self.key]) - - def remove_default(self, dct: dict[str, Any]) -> None: - raise NotImplementedError - - -class LanguageMigrationRequired(LanguageMigration): # replace with Required - def check(self, dct: dict[str, Any]) -> None: - if self.key not in dct: - raise cfgv.ValidationError(f'Missing required key: {self.key}') - - super().check(dct) - - MANIFEST_HOOK_DICT = cfgv.Map( 'Hook', 'id', - # check first in case it uses some newer, incompatible feature - cfgv.Optional( - 'minimum_pre_commit_version', - cfgv.check_and(cfgv.check_string, check_min_version), - '0', - ), - cfgv.Required('id', cfgv.check_string), cfgv.Required('name', cfgv.check_string), cfgv.Required('entry', cfgv.check_string), - LanguageMigrationRequired('language', cfgv.check_one_of(language_names)), + cfgv.Required('language', cfgv.check_one_of(language_names)), cfgv.Optional('alias', cfgv.check_string, ''), cfgv.Optional('files', check_string_regex, ''), @@ -259,6 +124,7 @@ MANIFEST_HOOK_DICT = cfgv.Map( cfgv.Optional('description', cfgv.check_string, ''), cfgv.Optional('language_version', cfgv.check_string, C.DEFAULT), cfgv.Optional('log_file', cfgv.check_string, ''), + cfgv.Optional('minimum_pre_commit_version', cfgv.check_string, '0'), cfgv.Optional('require_serial', cfgv.check_bool, False), StagesMigration('stages', []), cfgv.Optional('verbose', cfgv.check_bool, False), @@ -270,19 +136,10 @@ class InvalidManifestError(FatalError): pass -def _load_manifest_forward_compat(contents: str) -> object: - obj = yaml_load(contents) - if isinstance(obj, dict): - check_min_version('5') - raise AssertionError('unreachable') - else: - return obj - - load_manifest = functools.partial( cfgv.load_from_filename, schema=MANIFEST_SCHEMA, - load_strategy=_load_manifest_forward_compat, + load_strategy=yaml_load, exc_tp=InvalidManifestError, ) @@ -404,20 +261,12 @@ class NotAllowed(cfgv.OptionalNoDefault): raise cfgv.ValidationError(f'{self.key!r} cannot be overridden') -_COMMON_HOOK_WARNINGS = ( - OptionalSensibleRegexAtHook('files', cfgv.check_string), - OptionalSensibleRegexAtHook('exclude', cfgv.check_string), - DeprecatedStagesWarning('stages'), -) - META_HOOK_DICT = cfgv.Map( 'Hook', 'id', cfgv.Required('id', cfgv.check_string), cfgv.Required('id', cfgv.check_one_of(tuple(k for k, _ in _meta))), - # language must be `unsupported` - cfgv.Optional( - 'language', cfgv.check_one_of({'unsupported'}), 'unsupported', - ), + # language must be system + cfgv.Optional('language', cfgv.check_one_of({'system'}), 'system'), # entry cannot be overridden NotAllowed('entry', cfgv.check_any), *( @@ -434,7 +283,6 @@ META_HOOK_DICT = cfgv.Map( item for item in MANIFEST_HOOK_DICT.items ), - *_COMMON_HOOK_WARNINGS, ) CONFIG_HOOK_DICT = cfgv.Map( 'Hook', 'id', @@ -450,17 +298,18 
@@ CONFIG_HOOK_DICT = cfgv.Map( for item in MANIFEST_HOOK_DICT.items if item.key != 'id' if item.key != 'stages' - if item.key != 'language' # remove ), StagesMigrationNoDefault('stages', []), - LanguageMigration('language', cfgv.check_one_of(language_names)), # remove - *_COMMON_HOOK_WARNINGS, + OptionalSensibleRegexAtHook('files', cfgv.check_string), + OptionalSensibleRegexAtHook('exclude', cfgv.check_string), ) LOCAL_HOOK_DICT = cfgv.Map( 'Hook', 'id', *MANIFEST_HOOK_DICT.items, - *_COMMON_HOOK_WARNINGS, + + OptionalSensibleRegexAtHook('files', cfgv.check_string), + OptionalSensibleRegexAtHook('exclude', cfgv.check_string), ) CONFIG_REPO_DICT = cfgv.Map( 'Repository', 'repo', @@ -496,13 +345,6 @@ DEFAULT_LANGUAGE_VERSION = cfgv.Map( CONFIG_SCHEMA = cfgv.Map( 'Config', None, - # check first in case it uses some newer, incompatible feature - cfgv.Optional( - 'minimum_pre_commit_version', - cfgv.check_and(cfgv.check_string, check_min_version), - '0', - ), - cfgv.RequiredRecurse('repos', cfgv.Array(CONFIG_REPO_DICT)), cfgv.Optional( 'default_install_hook_types', @@ -513,10 +355,14 @@ CONFIG_SCHEMA = cfgv.Map( 'default_language_version', DEFAULT_LANGUAGE_VERSION, {}, ), StagesMigration('default_stages', STAGES), - DeprecatedDefaultStagesWarning('default_stages'), cfgv.Optional('files', check_string_regex, ''), cfgv.Optional('exclude', check_string_regex, '^$'), cfgv.Optional('fail_fast', cfgv.check_bool, False), + cfgv.Optional( + 'minimum_pre_commit_version', + cfgv.check_and(cfgv.check_string, check_min_version), + '0', + ), cfgv.WarnAdditionalKeys( ( 'repos', diff --git a/pre_commit/commands/autoupdate.py b/pre_commit/commands/autoupdate.py index aa0c5e25..7ed6e776 100644 --- a/pre_commit/commands/autoupdate.py +++ b/pre_commit/commands/autoupdate.py @@ -1,23 +1,22 @@ from __future__ import annotations -import concurrent.futures import os.path import re import tempfile -from collections.abc import Sequence from typing import Any from typing import NamedTuple +from typing import Sequence import pre_commit.constants as C from pre_commit import git from pre_commit import output -from pre_commit import xargs from pre_commit.clientlib import InvalidManifestError from pre_commit.clientlib import load_config from pre_commit.clientlib import load_manifest from pre_commit.clientlib import LOCAL from pre_commit.clientlib import META from pre_commit.commands.migrate_config import migrate_config +from pre_commit.store import Store from pre_commit.util import CalledProcessError from pre_commit.util import cmd_output from pre_commit.util import cmd_output_b @@ -28,58 +27,49 @@ from pre_commit.yaml import yaml_load class RevInfo(NamedTuple): repo: str rev: str - frozen: str | None = None - hook_ids: frozenset[str] = frozenset() + frozen: str | None @classmethod def from_config(cls, config: dict[str, Any]) -> RevInfo: - return cls(config['repo'], config['rev']) + return cls(config['repo'], config['rev'], None) def update(self, tags_only: bool, freeze: bool) -> RevInfo: + git_cmd = ('git', *git.NO_FS_MONITOR) + + if tags_only: + tag_cmd = ( + *git_cmd, 'describe', + 'FETCH_HEAD', '--tags', '--abbrev=0', + ) + else: + tag_cmd = ( + *git_cmd, 'describe', + 'FETCH_HEAD', '--tags', '--exact', + ) + with tempfile.TemporaryDirectory() as tmp: - _git = ('git', *git.NO_FS_MONITOR, '-C', tmp) - - if tags_only: - tag_opt = '--abbrev=0' - else: - tag_opt = '--exact' - tag_cmd = (*_git, 'describe', 'FETCH_HEAD', '--tags', tag_opt) - git.init_repo(tmp, self.repo) - cmd_output_b(*_git, 'config', 
'extensions.partialClone', 'true') cmd_output_b( - *_git, 'fetch', 'origin', 'HEAD', - '--quiet', '--filter=blob:none', '--tags', + *git_cmd, 'fetch', 'origin', 'HEAD', '--tags', + cwd=tmp, ) try: - rev = cmd_output(*tag_cmd)[1].strip() + rev = cmd_output(*tag_cmd, cwd=tmp)[1].strip() except CalledProcessError: - rev = cmd_output(*_git, 'rev-parse', 'FETCH_HEAD')[1].strip() + cmd = (*git_cmd, 'rev-parse', 'FETCH_HEAD') + rev = cmd_output(*cmd, cwd=tmp)[1].strip() else: if tags_only: rev = git.get_best_candidate_tag(rev, tmp) frozen = None if freeze: - exact = cmd_output(*_git, 'rev-parse', rev)[1].strip() + exact_rev_cmd = (*git_cmd, 'rev-parse', rev) + exact = cmd_output(*exact_rev_cmd, cwd=tmp)[1].strip() if exact != rev: rev, frozen = exact, rev - - try: - # workaround for windows -- see #2865 - cmd_output_b(*_git, 'show', f'{rev}:{C.MANIFEST_FILE}') - cmd_output(*_git, 'checkout', rev, '--', C.MANIFEST_FILE) - except CalledProcessError: - pass # this will be caught by manifest validating code - try: - manifest = load_manifest(os.path.join(tmp, C.MANIFEST_FILE)) - except InvalidManifestError as e: - raise RepositoryCannotBeUpdatedError(f'[{self.repo}] {e}') - else: - hook_ids = frozenset(hook['id'] for hook in manifest) - - return self._replace(rev=rev, frozen=frozen, hook_ids=hook_ids) + return self._replace(rev=rev, frozen=frozen) class RepositoryCannotBeUpdatedError(RuntimeError): @@ -89,30 +79,24 @@ class RepositoryCannotBeUpdatedError(RuntimeError): def _check_hooks_still_exist_at_rev( repo_config: dict[str, Any], info: RevInfo, + store: Store, ) -> None: + try: + path = store.clone(repo_config['repo'], info.rev) + manifest = load_manifest(os.path.join(path, C.MANIFEST_FILE)) + except InvalidManifestError as e: + raise RepositoryCannotBeUpdatedError(str(e)) + # See if any of our hooks were deleted with the new commits hooks = {hook['id'] for hook in repo_config['hooks']} - hooks_missing = hooks - info.hook_ids + hooks_missing = hooks - {hook['id'] for hook in manifest} if hooks_missing: raise RepositoryCannotBeUpdatedError( - f'[{info.repo}] Cannot update because the update target is ' - f'missing these hooks: {", ".join(sorted(hooks_missing))}', + f'Cannot update because the update target is missing these ' + f'hooks:\n{", ".join(sorted(hooks_missing))}', ) -def _update_one( - i: int, - repo: dict[str, Any], - *, - tags_only: bool, - freeze: bool, -) -> tuple[int, RevInfo, RevInfo]: - old = RevInfo.from_config(repo) - new = old.update(tags_only=tags_only, freeze=freeze) - _check_hooks_still_exist_at_rev(repo, new) - return i, old, new - - REV_LINE_RE = re.compile(r'^(\s+)rev:(\s*)([\'"]?)([^\s#]+)(.*)(\r?\n)$') @@ -161,53 +145,49 @@ def _write_new_config(path: str, rev_infos: list[RevInfo | None]) -> None: def autoupdate( config_file: str, + store: Store, tags_only: bool, freeze: bool, repos: Sequence[str] = (), - jobs: int = 1, ) -> int: """Auto-update the pre-commit config to the latest versions of repos.""" migrate_config(config_file, quiet=True) - changed = False retv = 0 + rev_infos: list[RevInfo | None] = [] + changed = False - config_repos = [ - repo for repo in load_config(config_file)['repos'] - if repo['repo'] not in {LOCAL, META} - ] + config = load_config(config_file) + for repo_config in config['repos']: + if repo_config['repo'] in {LOCAL, META}: + continue - rev_infos: list[RevInfo | None] = [None] * len(config_repos) - jobs = jobs or xargs.cpu_count() # 0 => number of cpus - jobs = min(jobs, len(repos) or len(config_repos)) # max 1-per-thread - jobs = max(jobs, 1) # 
at least one thread - with concurrent.futures.ThreadPoolExecutor(jobs) as exe: - futures = [ - exe.submit( - _update_one, - i, repo, tags_only=tags_only, freeze=freeze, - ) - for i, repo in enumerate(config_repos) - if not repos or repo['repo'] in repos - ] - for future in concurrent.futures.as_completed(futures): - try: - i, old, new = future.result() - except RepositoryCannotBeUpdatedError as e: - output.write_line(str(e)) - retv = 1 + info = RevInfo.from_config(repo_config) + if repos and info.repo not in repos: + rev_infos.append(None) + continue + + output.write(f'Updating {info.repo} ... ') + new_info = info.update(tags_only=tags_only, freeze=freeze) + try: + _check_hooks_still_exist_at_rev(repo_config, new_info, store) + except RepositoryCannotBeUpdatedError as error: + output.write_line(error.args[0]) + rev_infos.append(None) + retv = 1 + continue + + if new_info.rev != info.rev: + changed = True + if new_info.frozen: + updated_to = f'{new_info.frozen} (frozen)' else: - if new.rev != old.rev: - changed = True - if new.frozen: - new_s = f'{new.frozen} (frozen)' - else: - new_s = new.rev - msg = f'updating {old.rev} -> {new_s}' - rev_infos[i] = new - else: - msg = 'already up to date!' - - output.write_line(f'[{old.repo}] {msg}') + updated_to = new_info.rev + msg = f'updating {info.rev} -> {updated_to}.' + output.write_line(msg) + rev_infos.append(new_info) + else: + output.write_line('already up to date.') + rev_infos.append(None) if changed: _write_new_config(config_file, rev_infos) diff --git a/pre_commit/commands/gc.py b/pre_commit/commands/gc.py index 975d5e4c..6892e097 100644 --- a/pre_commit/commands/gc.py +++ b/pre_commit/commands/gc.py @@ -12,7 +12,6 @@ from pre_commit.clientlib import load_manifest from pre_commit.clientlib import LOCAL from pre_commit.clientlib import META from pre_commit.store import Store -from pre_commit.util import rmtree def _mark_used_repos( @@ -27,8 +26,7 @@ def _mark_used_repos( for hook in repo['hooks']: deps = hook.get('additional_dependencies') unused_repos.discard(( - store.db_repo_name(repo['repo'], deps), - C.LOCAL_REPO_VERSION, + store.db_repo_name(repo['repo'], deps), C.LOCAL_REPO_VERSION, )) else: key = (repo['repo'], repo['rev']) @@ -58,41 +56,34 @@ def _mark_used_repos( )) -def _gc(store: Store) -> int: - with store.exclusive_lock(), store.connect() as db: - store._create_configs_table(db) +def _gc_repos(store: Store) -> int: + configs = store.select_all_configs() + repos = store.select_all_repos() - repos = db.execute('SELECT repo, ref, path FROM repos').fetchall() - all_repos = {(repo, ref): path for repo, ref, path in repos} - unused_repos = set(all_repos) + # delete config paths which do not exist + dead_configs = [p for p in configs if not os.path.exists(p)] + live_configs = [p for p in configs if os.path.exists(p)] - configs_rows = db.execute('SELECT path FROM configs').fetchall() - configs = [path for path, in configs_rows] + all_repos = {(repo, ref): path for repo, ref, path in repos} + unused_repos = set(all_repos) + for config_path in live_configs: + try: + config = load_config(config_path) + except InvalidConfigError: + dead_configs.append(config_path) + continue + else: + for repo in config['repos']: + _mark_used_repos(store, all_repos, unused_repos, repo) - dead_configs = [] - for config_path in configs: - try: - config = load_config(config_path) - except InvalidConfigError: - dead_configs.append(config_path) - continue - else: - for repo in config['repos']: - _mark_used_repos(store, all_repos, unused_repos, repo) - - paths 
= [(path,) for path in dead_configs] - db.executemany('DELETE FROM configs WHERE path = ?', paths) - - db.executemany( - 'DELETE FROM repos WHERE repo = ? and ref = ?', - sorted(unused_repos), - ) - for k in unused_repos: - rmtree(all_repos[k]) - - return len(unused_repos) + store.delete_configs(dead_configs) + for db_repo_name, ref in unused_repos: + store.delete_repo(db_repo_name, ref, all_repos[(db_repo_name, ref)]) + return len(unused_repos) def gc(store: Store) -> int: - output.write_line(f'{_gc(store)} repo(s) removed.') + with store.exclusive_lock(): + repos_removed = _gc_repos(store) + output.write_line(f'{repos_removed} repo(s) removed.') return 0 diff --git a/pre_commit/commands/hazmat.py b/pre_commit/commands/hazmat.py deleted file mode 100644 index 01b27ce6..00000000 --- a/pre_commit/commands/hazmat.py +++ /dev/null @@ -1,95 +0,0 @@ -from __future__ import annotations - -import argparse -import subprocess -from collections.abc import Sequence - -from pre_commit.parse_shebang import normalize_cmd - - -def add_parsers(parser: argparse.ArgumentParser) -> None: - subparsers = parser.add_subparsers(dest='tool') - - cd_parser = subparsers.add_parser( - 'cd', help='cd to a subdir and run the command', - ) - cd_parser.add_argument('subdir') - cd_parser.add_argument('cmd', nargs=argparse.REMAINDER) - - ignore_exit_code_parser = subparsers.add_parser( - 'ignore-exit-code', help='run the command but ignore the exit code', - ) - ignore_exit_code_parser.add_argument('cmd', nargs=argparse.REMAINDER) - - n1_parser = subparsers.add_parser( - 'n1', help='run the command once per filename', - ) - n1_parser.add_argument('cmd', nargs=argparse.REMAINDER) - - -def _cmd_filenames(cmd: tuple[str, ...]) -> tuple[ - tuple[str, ...], - tuple[str, ...], -]: - for idx, val in enumerate(reversed(cmd)): - if val == '--': - split = len(cmd) - idx - break - else: - raise SystemExit('hazmat entry must end with `--`') - - return cmd[:split - 1], cmd[split:] - - -def cd(subdir: str, cmd: tuple[str, ...]) -> int: - cmd, filenames = _cmd_filenames(cmd) - - prefix = f'{subdir}/' - new_filenames = [] - for filename in filenames: - if not filename.startswith(prefix): - raise SystemExit(f'unexpected file without {prefix=}: {filename}') - else: - new_filenames.append(filename.removeprefix(prefix)) - - cmd = normalize_cmd(cmd) - return subprocess.call((*cmd, *new_filenames), cwd=subdir) - - -def ignore_exit_code(cmd: tuple[str, ...]) -> int: - cmd = normalize_cmd(cmd) - subprocess.call(cmd) - return 0 - - -def n1(cmd: tuple[str, ...]) -> int: - cmd, filenames = _cmd_filenames(cmd) - cmd = normalize_cmd(cmd) - ret = 0 - for filename in filenames: - ret |= subprocess.call((*cmd, filename)) - return ret - - -def impl(args: argparse.Namespace) -> int: - args.cmd = tuple(args.cmd) - if args.tool == 'cd': - return cd(args.subdir, args.cmd) - elif args.tool == 'ignore-exit-code': - return ignore_exit_code(args.cmd) - elif args.tool == 'n1': - return n1(args.cmd) - else: - raise NotImplementedError(f'unexpected tool: {args.tool}') - - -def main(argv: Sequence[str] | None = None) -> int: - parser = argparse.ArgumentParser() - add_parsers(parser) - args = parser.parse_args(argv) - - return impl(args) - - -if __name__ == '__main__': - raise SystemExit(main()) diff --git a/pre_commit/commands/hook_impl.py b/pre_commit/commands/hook_impl.py index de5c8f34..dab2135d 100644 --- a/pre_commit/commands/hook_impl.py +++ b/pre_commit/commands/hook_impl.py @@ -4,7 +4,7 @@ import argparse import os.path import subprocess import sys -from 
collections.abc import Sequence +from typing import Sequence from pre_commit.commands.run import run from pre_commit.envcontext import envcontext @@ -106,7 +106,6 @@ def _ns( hook=None, verbose=False, show_diff_on_failure=False, - fail_fast=False, ) diff --git a/pre_commit/commands/install_uninstall.py b/pre_commit/commands/install_uninstall.py index d19e0d47..5ff6cba6 100644 --- a/pre_commit/commands/install_uninstall.py +++ b/pre_commit/commands/install_uninstall.py @@ -103,7 +103,8 @@ def _install_hook_script( hook_file.write(before + TEMPLATE_START) hook_file.write(f'INSTALL_PYTHON={shlex.quote(sys.executable)}\n') - args_s = shlex.join(args) + # TODO: python3.8+: shlex.join + args_s = ' '.join(shlex.quote(part) for part in args) hook_file.write(f'ARGS=({args_s})\n') hook_file.write(TEMPLATE_END + after) make_executable(hook_path) diff --git a/pre_commit/commands/migrate_config.py b/pre_commit/commands/migrate_config.py index b04c53a5..842fb3a7 100644 --- a/pre_commit/commands/migrate_config.py +++ b/pre_commit/commands/migrate_config.py @@ -1,21 +1,13 @@ from __future__ import annotations -import functools -import itertools +import re import textwrap -from collections.abc import Callable import cfgv import yaml -from yaml.nodes import ScalarNode from pre_commit.clientlib import InvalidConfigError -from pre_commit.yaml import yaml_compose from pre_commit.yaml import yaml_load -from pre_commit.yaml_rewrite import MappingKey -from pre_commit.yaml_rewrite import MappingValue -from pre_commit.yaml_rewrite import match -from pre_commit.yaml_rewrite import SequenceItem def _is_header_line(line: str) -> bool: @@ -46,69 +38,16 @@ def _migrate_map(contents: str) -> str: return contents -def _preserve_style(n: ScalarNode, *, s: str) -> str: - style = n.style or '' - return f'{style}{s}{style}' +def _migrate_sha_to_rev(contents: str) -> str: + return re.sub(r'(\n\s+)sha:', r'\1rev:', contents) -def _fix_stage(n: ScalarNode) -> str: - return _preserve_style(n, s=f'pre-{n.value}') - - -def _migrate_composed(contents: str) -> str: - tree = yaml_compose(contents) - rewrites: list[tuple[ScalarNode, Callable[[ScalarNode], str]]] = [] - - # sha -> rev - sha_to_rev_replace = functools.partial(_preserve_style, s='rev') - sha_to_rev_matcher = ( - MappingValue('repos'), - SequenceItem(), - MappingKey('sha'), +def _migrate_python_venv(contents: str) -> str: + return re.sub( + r'(\n\s+)language: python_venv\b', + r'\1language: python', + contents, ) - for node in match(tree, sha_to_rev_matcher): - rewrites.append((node, sha_to_rev_replace)) - - # python_venv -> python - language_matcher = ( - MappingValue('repos'), - SequenceItem(), - MappingValue('hooks'), - SequenceItem(), - MappingValue('language'), - ) - python_venv_replace = functools.partial(_preserve_style, s='python') - for node in match(tree, language_matcher): - if node.value == 'python_venv': - rewrites.append((node, python_venv_replace)) - - # stages rewrites - default_stages_matcher = (MappingValue('default_stages'), SequenceItem()) - default_stages_match = match(tree, default_stages_matcher) - hook_stages_matcher = ( - MappingValue('repos'), - SequenceItem(), - MappingValue('hooks'), - SequenceItem(), - MappingValue('stages'), - SequenceItem(), - ) - hook_stages_match = match(tree, hook_stages_matcher) - for node in itertools.chain(default_stages_match, hook_stages_match): - if node.value in {'commit', 'push', 'merge-commit'}: - rewrites.append((node, _fix_stage)) - - rewrites.sort(reverse=True, key=lambda nf: nf[0].start_mark.index) - - 
src_parts = [] - end: int | None = None - for node, func in rewrites: - src_parts.append(contents[node.end_mark.index:end]) - src_parts.append(func(node)) - end = node.start_mark.index - src_parts.append(contents[:end]) - src_parts.reverse() - return ''.join(src_parts) def migrate_config(config_file: str, quiet: bool = False) -> int: @@ -123,7 +62,8 @@ def migrate_config(config_file: str, quiet: bool = False) -> int: raise cfgv.ValidationError(str(e)) contents = _migrate_map(contents) - contents = _migrate_composed(contents) + contents = _migrate_sha_to_rev(contents) + contents = _migrate_python_venv(contents) if contents != orig_contents: with open(config_file, 'w') as f: diff --git a/pre_commit/commands/run.py b/pre_commit/commands/run.py index 8ab505ff..c867799e 100644 --- a/pre_commit/commands/run.py +++ b/pre_commit/commands/run.py @@ -9,11 +9,10 @@ import re import subprocess import time import unicodedata -from collections.abc import Generator -from collections.abc import Iterable -from collections.abc import MutableMapping -from collections.abc import Sequence from typing import Any +from typing import Collection +from typing import MutableMapping +from typing import Sequence from identify.identify import tags_from_path @@ -58,36 +57,37 @@ def _full_msg( def filter_by_include_exclude( - names: Iterable[str], + names: Collection[str], include: str, exclude: str, -) -> Generator[str]: +) -> list[str]: include_re, exclude_re = re.compile(include), re.compile(exclude) - return ( + return [ filename for filename in names if include_re.search(filename) if not exclude_re.search(filename) - ) + ] class Classifier: - def __init__(self, filenames: Iterable[str]) -> None: + def __init__(self, filenames: Collection[str]) -> None: self.filenames = [f for f in filenames if os.path.lexists(f)] - @functools.cache + @functools.lru_cache(maxsize=None) def _types_for_file(self, filename: str) -> set[str]: return tags_from_path(filename) def by_types( self, - names: Iterable[str], - types: Iterable[str], - types_or: Iterable[str], - exclude_types: Iterable[str], - ) -> Generator[str]: + names: Sequence[str], + types: Collection[str], + types_or: Collection[str], + exclude_types: Collection[str], + ) -> list[str]: types = frozenset(types) types_or = frozenset(types_or) exclude_types = frozenset(exclude_types) + ret = [] for filename in names: tags = self._types_for_file(filename) if ( @@ -95,24 +95,24 @@ class Classifier: (not types_or or tags & types_or) and not tags & exclude_types ): - yield filename + ret.append(filename) + return ret - def filenames_for_hook(self, hook: Hook) -> Generator[str]: - return self.by_types( - filter_by_include_exclude( - self.filenames, - hook.files, - hook.exclude, - ), + def filenames_for_hook(self, hook: Hook) -> tuple[str, ...]: + names = self.filenames + names = filter_by_include_exclude(names, hook.files, hook.exclude) + names = self.by_types( + names, hook.types, hook.types_or, hook.exclude_types, ) + return tuple(names) @classmethod def from_config( cls, - filenames: Iterable[str], + filenames: Collection[str], include: str, exclude: str, ) -> Classifier: @@ -121,7 +121,7 @@ class Classifier: # this also makes improperly quoted shell-based hooks work better # see #1173 if os.altsep == '/' and os.sep == '\\': - filenames = (f.replace(os.sep, os.altsep) for f in filenames) + filenames = [f.replace(os.sep, os.altsep) for f in filenames] filenames = filter_by_include_exclude(filenames, include, exclude) return Classifier(filenames) @@ -148,7 +148,7 @@ def 
_run_single_hook( verbose: bool, use_color: bool, ) -> tuple[bool, bytes]: - filenames = tuple(classifier.filenames_for_hook(hook)) + filenames = classifier.filenames_for_hook(hook) if hook.id in skips or hook.alias in skips: output.write( @@ -187,7 +187,7 @@ def _run_single_hook( if not hook.pass_filenames: filenames = () - time_before = time.monotonic() + time_before = time.time() language = languages[hook.language] with language.in_env(hook.prefix, hook.language_version): retcode, out = language.run_hook( @@ -199,7 +199,7 @@ def _run_single_hook( require_serial=hook.require_serial, color=use_color, ) - duration = round(time.monotonic() - time_before, 2) or 0 + duration = round(time.time() - time_before, 2) or 0 diff_after = _get_diff() # if the hook makes changes, fail the commit @@ -250,7 +250,7 @@ def _compute_cols(hooks: Sequence[Hook]) -> int: return max(cols, 80) -def _all_filenames(args: argparse.Namespace) -> Iterable[str]: +def _all_filenames(args: argparse.Namespace) -> Collection[str]: # these hooks do not operate on files if args.hook_stage in { 'post-checkout', 'post-commit', 'post-merge', 'post-rewrite', @@ -298,8 +298,7 @@ def _run_hooks( verbose=args.verbose, use_color=args.color, ) retval |= current_retval - fail_fast = (config['fail_fast'] or hook.fail_fast or args.fail_fast) - if current_retval and fail_fast: + if retval and (config['fail_fast'] or hook.fail_fast): break if retval and args.show_diff_on_failure and prior_diff: if args.all_files: diff --git a/pre_commit/commands/validate_config.py b/pre_commit/commands/validate_config.py index b3de635b..24bd3135 100644 --- a/pre_commit/commands/validate_config.py +++ b/pre_commit/commands/validate_config.py @@ -1,6 +1,6 @@ from __future__ import annotations -from collections.abc import Sequence +from typing import Sequence from pre_commit import clientlib diff --git a/pre_commit/commands/validate_manifest.py b/pre_commit/commands/validate_manifest.py index 8493c6e1..419031a9 100644 --- a/pre_commit/commands/validate_manifest.py +++ b/pre_commit/commands/validate_manifest.py @@ -1,6 +1,6 @@ from __future__ import annotations -from collections.abc import Sequence +from typing import Sequence from pre_commit import clientlib diff --git a/pre_commit/envcontext.py b/pre_commit/envcontext.py index d4d24118..4f595601 100644 --- a/pre_commit/envcontext.py +++ b/pre_commit/envcontext.py @@ -3,9 +3,10 @@ from __future__ import annotations import contextlib import enum import os -from collections.abc import Generator -from collections.abc import MutableMapping +from typing import Generator +from typing import MutableMapping from typing import NamedTuple +from typing import Tuple from typing import Union _Unset = enum.Enum('_Unset', 'UNSET') @@ -17,9 +18,9 @@ class Var(NamedTuple): default: str = '' -SubstitutionT = tuple[Union[str, Var], ...] +SubstitutionT = Tuple[Union[str, Var], ...] ValueT = Union[str, _Unset, SubstitutionT] -PatchesT = tuple[tuple[str, ValueT], ...] +PatchesT = Tuple[Tuple[str, ValueT], ...] def format_env(parts: SubstitutionT, env: MutableMapping[str, str]) -> str: @@ -33,7 +34,7 @@ def format_env(parts: SubstitutionT, env: MutableMapping[str, str]) -> str: def envcontext( patch: PatchesT, _env: MutableMapping[str, str] | None = None, -) -> Generator[None]: +) -> Generator[None, None, None]: """In this context, `os.environ` is modified according to `patch`. 
`patch` is an iterable of 2-tuples (key, value): diff --git a/pre_commit/error_handler.py b/pre_commit/error_handler.py index 4f0e0573..d740ee3e 100644 --- a/pre_commit/error_handler.py +++ b/pre_commit/error_handler.py @@ -5,7 +5,7 @@ import functools import os.path import sys import traceback -from collections.abc import Generator +from typing import Generator from typing import IO import pre_commit.constants as C @@ -68,7 +68,7 @@ def _log_and_exit( @contextlib.contextmanager -def error_handler() -> Generator[None]: +def error_handler() -> Generator[None, None, None]: try: yield except (Exception, KeyboardInterrupt) as e: diff --git a/pre_commit/file_lock.py b/pre_commit/file_lock.py index 6223f869..f67a5864 100644 --- a/pre_commit/file_lock.py +++ b/pre_commit/file_lock.py @@ -3,8 +3,8 @@ from __future__ import annotations import contextlib import errno import sys -from collections.abc import Callable -from collections.abc import Generator +from typing import Callable +from typing import Generator if sys.platform == 'win32': # pragma: no cover (windows) @@ -20,7 +20,7 @@ if sys.platform == 'win32': # pragma: no cover (windows) def _locked( fileno: int, blocked_cb: Callable[[], None], - ) -> Generator[None]: + ) -> Generator[None, None, None]: try: msvcrt.locking(fileno, msvcrt.LK_NBLCK, _region) except OSError: @@ -53,7 +53,7 @@ else: # pragma: win32 no cover def _locked( fileno: int, blocked_cb: Callable[[], None], - ) -> Generator[None]: + ) -> Generator[None, None, None]: try: fcntl.flock(fileno, fcntl.LOCK_EX | fcntl.LOCK_NB) except OSError: # pragma: no cover (tests are single-threaded) @@ -69,7 +69,7 @@ else: # pragma: win32 no cover def lock( path: str, blocked_cb: Callable[[], None], -) -> Generator[None]: +) -> Generator[None, None, None]: with open(path, 'a+') as f: with _locked(f.fileno(), blocked_cb): yield diff --git a/pre_commit/git.py b/pre_commit/git.py index ec1928f3..333dc7ba 100644 --- a/pre_commit/git.py +++ b/pre_commit/git.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging import os.path import sys -from collections.abc import Mapping +from typing import Mapping from pre_commit.errors import FatalError from pre_commit.util import CalledProcessError @@ -126,7 +126,7 @@ def get_conflicted_files() -> set[str]: merge_diff_filenames = zsplit( cmd_output( 'git', 'diff', '--name-only', '--no-ext-diff', '-z', - '-m', tree_hash, 'HEAD', 'MERGE_HEAD', '--', + '-m', tree_hash, 'HEAD', 'MERGE_HEAD', )[1], ) return set(merge_conflict_filenames) | set(merge_diff_filenames) @@ -219,7 +219,7 @@ def check_for_cygwin_mismatch() -> None: if is_cygwin_python ^ is_cygwin_git: exe_type = {True: '(cygwin)', False: '(windows)'} - logger.warning( + logger.warn( f'pre-commit has detected a mix of cygwin python / git\n' f'This combination is not supported, it is likely you will ' f'receive an error later in the program.\n' diff --git a/pre_commit/hook.py b/pre_commit/hook.py index 309cd5be..6d436ca3 100644 --- a/pre_commit/hook.py +++ b/pre_commit/hook.py @@ -1,9 +1,9 @@ from __future__ import annotations import logging -from collections.abc import Sequence from typing import Any from typing import NamedTuple +from typing import Sequence from pre_commit.prefix import Prefix diff --git a/pre_commit/lang_base.py b/pre_commit/lang_base.py index 198e9365..9480c559 100644 --- a/pre_commit/lang_base.py +++ b/pre_commit/lang_base.py @@ -1,23 +1,23 @@ from __future__ import annotations import contextlib +import multiprocessing import os import random import re import shlex -import 
sys -from collections.abc import Generator -from collections.abc import Sequence from typing import Any from typing import ContextManager +from typing import Generator from typing import NoReturn from typing import Protocol +from typing import Sequence import pre_commit.constants as C from pre_commit import parse_shebang -from pre_commit import xargs from pre_commit.prefix import Prefix from pre_commit.util import cmd_output_b +from pre_commit.xargs import xargs FIXED_RANDOM_SEED = 1542676187 @@ -128,7 +128,7 @@ def no_install( @contextlib.contextmanager -def no_env(prefix: Prefix, version: str) -> Generator[None]: +def no_env(prefix: Prefix, version: str) -> Generator[None, None, None]: yield @@ -140,7 +140,10 @@ def target_concurrency() -> int: if 'TRAVIS' in os.environ: return 2 else: - return xargs.cpu_count() + try: + return multiprocessing.cpu_count() + except NotImplementedError: + return 1 def _shuffled(seq: Sequence[str]) -> list[str]: @@ -168,14 +171,11 @@ def run_xargs( # ordering. file_args = _shuffled(file_args) jobs = target_concurrency() - return xargs.xargs(cmd, file_args, target_concurrency=jobs, color=color) + return xargs(cmd, file_args, target_concurrency=jobs, color=color) def hook_cmd(entry: str, args: Sequence[str]) -> tuple[str, ...]: - cmd = shlex.split(entry) - if cmd[:2] == ['pre-commit', 'hazmat']: - cmd = [sys.executable, '-m', 'pre_commit.commands.hazmat', *cmd[2:]] - return (*cmd, *args) + return (*shlex.split(entry), *args) def basic_run_hook( diff --git a/pre_commit/languages/conda.py b/pre_commit/languages/conda.py index d397ebeb..41c355e7 100644 --- a/pre_commit/languages/conda.py +++ b/pre_commit/languages/conda.py @@ -3,8 +3,8 @@ from __future__ import annotations import contextlib import os import sys -from collections.abc import Generator -from collections.abc import Sequence +from typing import Generator +from typing import Sequence from pre_commit import lang_base from pre_commit.envcontext import envcontext @@ -41,7 +41,7 @@ def get_env_patch(env: str) -> PatchesT: @contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None]: +def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir)): yield diff --git a/pre_commit/languages/coursier.py b/pre_commit/languages/coursier.py index 08f9a958..9c5fbfe2 100644 --- a/pre_commit/languages/coursier.py +++ b/pre_commit/languages/coursier.py @@ -2,8 +2,8 @@ from __future__ import annotations import contextlib import os.path -from collections.abc import Generator -from collections.abc import Sequence +from typing import Generator +from typing import Sequence from pre_commit import lang_base from pre_commit.envcontext import envcontext @@ -70,7 +70,7 @@ def get_env_patch(target_dir: str) -> PatchesT: @contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None]: +def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir)): yield diff --git a/pre_commit/languages/dart.py b/pre_commit/languages/dart.py index 52a229ee..e8539caa 100644 --- a/pre_commit/languages/dart.py +++ b/pre_commit/languages/dart.py @@ -4,8 +4,8 @@ import contextlib import os.path import shutil import tempfile -from collections.abc import Generator -from collections.abc import Sequence +from typing import Generator +from typing import Sequence from 
pre_commit import lang_base from pre_commit.envcontext import envcontext @@ -29,7 +29,7 @@ def get_env_patch(venv: str) -> PatchesT: @contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None]: +def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir)): yield diff --git a/pre_commit/languages/docker.py b/pre_commit/languages/docker.py index 7f45ac86..8e53ca9e 100644 --- a/pre_commit/languages/docker.py +++ b/pre_commit/languages/docker.py @@ -1,12 +1,9 @@ from __future__ import annotations -import contextlib -import functools import hashlib import json import os -import re -from collections.abc import Sequence +from typing import Sequence from pre_commit import lang_base from pre_commit.prefix import Prefix @@ -19,34 +16,32 @@ get_default_version = lang_base.basic_get_default_version health_check = lang_base.basic_health_check in_env = lang_base.no_env # no special environment for docker -_HOSTNAME_MOUNT_RE = re.compile( - rb""" - /containers - (?:/overlay-containers)? - /([a-z0-9]{64}) - (?:/userdata)? - /hostname - """, - re.VERBOSE, -) + +def _is_in_docker() -> bool: + try: + with open('/proc/1/cgroup', 'rb') as f: + return b'docker' in f.read() + except FileNotFoundError: + return False -def _get_container_id() -> str | None: - with contextlib.suppress(FileNotFoundError): - with open('/proc/1/mountinfo', 'rb') as f: - for line in f: - m = _HOSTNAME_MOUNT_RE.search(line) - if m: - return m[1].decode() - - return None +def _get_container_id() -> str: + # It's assumed that we already check /proc/1/cgroup in _is_in_docker. The + # cpuset cgroup controller existed since cgroups were introduced so this + # way of getting the container ID is pretty reliable. + with open('/proc/1/cgroup', 'rb') as f: + for line in f.readlines(): + if line.split(b':')[1] == b'cpuset': + return os.path.basename(line.split(b':')[2]).strip().decode() + raise RuntimeError('Failed to find the container ID in /proc/1/cgroup.') def _get_docker_path(path: str) -> str: - container_id = _get_container_id() - if container_id is None: + if not _is_in_docker(): return path + container_id = _get_container_id() + try: _, out, _ = cmd_output_b('docker', 'inspect', container_id) except CalledProcessError: @@ -106,47 +101,17 @@ def install_environment( os.mkdir(directory) -@functools.lru_cache(maxsize=1) -def _is_rootless() -> bool: # pragma: win32 no cover - retcode, out, _ = cmd_output_b( - 'docker', 'system', 'info', '--format', '{{ json . 
}}', - ) - if retcode != 0: - return False - - info = json.loads(out) - try: - return ( - # docker: - # https://docs.docker.com/reference/api/engine/version/v1.48/#tag/System/operation/SystemInfo - 'name=rootless' in (info.get('SecurityOptions') or ()) or - # podman: - # https://docs.podman.io/en/latest/_static/api.html?version=v5.4#tag/system/operation/SystemInfoLibpod - info['host']['security']['rootless'] - ) - except KeyError: - return False - - def get_docker_user() -> tuple[str, ...]: # pragma: win32 no cover - if _is_rootless(): - return () - try: return ('-u', f'{os.getuid()}:{os.getgid()}') except AttributeError: return () -def get_docker_tty(*, color: bool) -> tuple[str, ...]: # pragma: win32 no cover # noqa: E501 - return (('--tty',) if color else ()) - - -def docker_cmd(*, color: bool) -> tuple[str, ...]: # pragma: win32 no cover +def docker_cmd() -> tuple[str, ...]: # pragma: win32 no cover return ( 'docker', 'run', '--rm', - *get_docker_tty(color=color), *get_docker_user(), # https://docs.docker.com/engine/reference/commandline/run/#mount-volumes-from-container-volumes-from # The `Z` option tells Docker to label the content with a private @@ -174,7 +139,7 @@ def run_hook( entry_tag = ('--entrypoint', entry_exe, docker_tag(prefix)) return lang_base.run_xargs( - (*docker_cmd(color=color), *entry_tag, *cmd_rest), + (*docker_cmd(), *entry_tag, *cmd_rest), file_args, require_serial=require_serial, color=color, diff --git a/pre_commit/languages/docker_image.py b/pre_commit/languages/docker_image.py index 60caa101..26f006e4 100644 --- a/pre_commit/languages/docker_image.py +++ b/pre_commit/languages/docker_image.py @@ -1,6 +1,6 @@ from __future__ import annotations -from collections.abc import Sequence +from typing import Sequence from pre_commit import lang_base from pre_commit.languages.docker import docker_cmd @@ -23,7 +23,7 @@ def run_hook( require_serial: bool, color: bool, ) -> tuple[int, bytes]: # pragma: win32 no cover - cmd = docker_cmd(color=color) + lang_base.hook_cmd(entry, args) + cmd = docker_cmd() + lang_base.hook_cmd(entry, args) return lang_base.run_xargs( cmd, file_args, diff --git a/pre_commit/languages/dotnet.py b/pre_commit/languages/dotnet.py index ffc65d1e..e9568f22 100644 --- a/pre_commit/languages/dotnet.py +++ b/pre_commit/languages/dotnet.py @@ -6,8 +6,8 @@ import re import tempfile import xml.etree.ElementTree import zipfile -from collections.abc import Generator -from collections.abc import Sequence +from typing import Generator +from typing import Sequence from pre_commit import lang_base from pre_commit.envcontext import envcontext @@ -30,14 +30,14 @@ def get_env_patch(venv: str) -> PatchesT: @contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None]: +def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir)): yield @contextlib.contextmanager -def _nuget_config_no_sources() -> Generator[str]: +def _nuget_config_no_sources() -> Generator[str, None, None]: with tempfile.TemporaryDirectory() as tmpdir: nuget_config = os.path.join(tmpdir, 'nuget.config') with open(nuget_config, 'w') as f: diff --git a/pre_commit/languages/fail.py b/pre_commit/languages/fail.py index 6ac4d767..a8ec6a53 100644 --- a/pre_commit/languages/fail.py +++ b/pre_commit/languages/fail.py @@ -1,6 +1,6 @@ from __future__ import annotations -from collections.abc import Sequence +from typing import Sequence from pre_commit import 
lang_base from pre_commit.prefix import Prefix diff --git a/pre_commit/languages/golang.py b/pre_commit/languages/golang.py index bedbd114..bea91e9b 100644 --- a/pre_commit/languages/golang.py +++ b/pre_commit/languages/golang.py @@ -12,18 +12,17 @@ import tempfile import urllib.error import urllib.request import zipfile -from collections.abc import Generator -from collections.abc import Sequence from typing import ContextManager +from typing import Generator from typing import IO from typing import Protocol +from typing import Sequence import pre_commit.constants as C from pre_commit import lang_base from pre_commit.envcontext import envcontext from pre_commit.envcontext import PatchesT from pre_commit.envcontext import Var -from pre_commit.git import no_git_env from pre_commit.prefix import Prefix from pre_commit.util import cmd_output from pre_commit.util import rmtree @@ -75,7 +74,6 @@ def get_env_patch(venv: str, version: str) -> PatchesT: return ( ('GOROOT', os.path.join(venv, '.go')), - ('GOTOOLCHAIN', 'local'), ( 'PATH', ( os.path.join(venv, 'bin'), os.pathsep, @@ -90,7 +88,8 @@ def _infer_go_version(version: str) -> str: if version != C.DEFAULT: return version resp = urllib.request.urlopen('https://go.dev/dl/?mode=json') - return json.load(resp)[0]['version'].removeprefix('go') + # TODO: 3.9+ .removeprefix('go') + return json.load(resp)[0]['version'][2:] def _get_url(version: str) -> str: @@ -121,7 +120,7 @@ def _install_go(version: str, dest: str) -> None: @contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None]: +def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir, version)): yield @@ -142,10 +141,9 @@ def install_environment( else: gopath = env_dir - env = no_git_env(dict(os.environ, GOPATH=gopath)) + env = dict(os.environ, GOPATH=gopath) env.pop('GOBIN', None) if version != 'system': - env['GOTOOLCHAIN'] = 'local' env['GOROOT'] = os.path.join(env_dir, '.go') env['PATH'] = os.pathsep.join(( os.path.join(env_dir, '.go', 'bin'), os.environ['PATH'], diff --git a/pre_commit/languages/haskell.py b/pre_commit/languages/haskell.py deleted file mode 100644 index 28bca08c..00000000 --- a/pre_commit/languages/haskell.py +++ /dev/null @@ -1,56 +0,0 @@ -from __future__ import annotations - -import contextlib -import os.path -from collections.abc import Generator -from collections.abc import Sequence - -from pre_commit import lang_base -from pre_commit.envcontext import envcontext -from pre_commit.envcontext import PatchesT -from pre_commit.envcontext import Var -from pre_commit.errors import FatalError -from pre_commit.prefix import Prefix - -ENVIRONMENT_DIR = 'hs_env' -get_default_version = lang_base.basic_get_default_version -health_check = lang_base.basic_health_check -run_hook = lang_base.basic_run_hook - - -def get_env_patch(target_dir: str) -> PatchesT: - bin_path = os.path.join(target_dir, 'bin') - return (('PATH', (bin_path, os.pathsep, Var('PATH'))),) - - -@contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None]: - envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) - with envcontext(get_env_patch(envdir)): - yield - - -def install_environment( - prefix: Prefix, - version: str, - additional_dependencies: Sequence[str], -) -> None: - lang_base.assert_version_default('haskell', version) - envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) - - pkgs = 
[*prefix.star('.cabal'), *additional_dependencies] - if not pkgs: - raise FatalError('Expected .cabal files or additional_dependencies') - - bindir = os.path.join(envdir, 'bin') - os.makedirs(bindir, exist_ok=True) - lang_base.setup_cmd(prefix, ('cabal', 'update')) - lang_base.setup_cmd( - prefix, - ( - 'cabal', 'install', - '--install-method', 'copy', - '--installdir', bindir, - *pkgs, - ), - ) diff --git a/pre_commit/languages/julia.py b/pre_commit/languages/julia.py deleted file mode 100644 index 7559b5ba..00000000 --- a/pre_commit/languages/julia.py +++ /dev/null @@ -1,133 +0,0 @@ -from __future__ import annotations - -import contextlib -import os -import shutil -from collections.abc import Generator -from collections.abc import Sequence - -from pre_commit import lang_base -from pre_commit.envcontext import envcontext -from pre_commit.envcontext import PatchesT -from pre_commit.envcontext import UNSET -from pre_commit.prefix import Prefix -from pre_commit.util import cmd_output_b - -ENVIRONMENT_DIR = 'juliaenv' -health_check = lang_base.basic_health_check -get_default_version = lang_base.basic_get_default_version - - -def run_hook( - prefix: Prefix, - entry: str, - args: Sequence[str], - file_args: Sequence[str], - *, - is_local: bool, - require_serial: bool, - color: bool, -) -> tuple[int, bytes]: - # `entry` is a (hook-repo relative) file followed by (optional) args, e.g. - # `bin/id.jl` or `bin/hook.jl --arg1 --arg2` so we - # 1) shell parse it and join with args with hook_cmd - # 2) prepend the hooks prefix path to the first argument (the file), unless - # it is a local script - # 3) prepend `julia` as the interpreter - - cmd = lang_base.hook_cmd(entry, args) - script = cmd[0] if is_local else prefix.path(cmd[0]) - cmd = ('julia', '--startup-file=no', script, *cmd[1:]) - return lang_base.run_xargs( - cmd, - file_args, - require_serial=require_serial, - color=color, - ) - - -def get_env_patch(target_dir: str, version: str) -> PatchesT: - return ( - ('JULIA_LOAD_PATH', target_dir), - # May be set, remove it to not interfer with LOAD_PATH - ('JULIA_PROJECT', UNSET), - ) - - -@contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None]: - envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) - with envcontext(get_env_patch(envdir, version)): - yield - - -def install_environment( - prefix: Prefix, - version: str, - additional_dependencies: Sequence[str], -) -> None: - envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) - with in_env(prefix, version): - # TODO: Support language_version with juliaup similar to rust via - # rustup - # if version != 'system': - # ... 
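For context on the hunks above: the language plugins being touched all share the same shape — `get_env_patch` builds PATH-style overrides for the hook environment and `in_env` applies them via `envcontext` for the duration of a hook run. A minimal, self-contained sketch of that idea using only the standard library; `scoped_env` and `path_patch` are illustrative names, not pre-commit's API:

```python
import contextlib
import os
from typing import Generator, Tuple


@contextlib.contextmanager
def scoped_env(patches: Tuple[Tuple[str, str], ...]) -> Generator[None, None, None]:
    """Apply environment overrides, then restore the previous values."""
    saved = {k: os.environ.get(k) for k, _ in patches}
    try:
        for k, v in patches:
            os.environ[k] = v
        yield
    finally:
        for k, prev in saved.items():
            if prev is None:
                os.environ.pop(k, None)
            else:
                os.environ[k] = prev


def path_patch(bindir: str) -> Tuple[Tuple[str, str], ...]:
    # roughly mirrors ('PATH', (bin_path, os.pathsep, Var('PATH'))) above
    return (('PATH', bindir + os.pathsep + os.environ.get('PATH', '')),)


with scoped_env(path_patch('/tmp/hook-env/bin')):
    print(os.environ['PATH'].split(os.pathsep)[0])  # -> /tmp/hook-env/bin
```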
- - # Copy Project.toml to hook env if it exist - os.makedirs(envdir, exist_ok=True) - project_names = ('JuliaProject.toml', 'Project.toml') - project_found = False - for project_name in project_names: - project_file = prefix.path(project_name) - if not os.path.isfile(project_file): - continue - shutil.copy(project_file, envdir) - project_found = True - break - - # If no project file was found we create an empty one so that the - # package manager doesn't error - if not project_found: - open(os.path.join(envdir, 'Project.toml'), 'a').close() - - # Copy Manifest.toml to hook env if it exists - manifest_names = ('JuliaManifest.toml', 'Manifest.toml') - for manifest_name in manifest_names: - manifest_file = prefix.path(manifest_name) - if not os.path.isfile(manifest_file): - continue - shutil.copy(manifest_file, envdir) - break - - # Julia code to instantiate the hook environment - julia_code = """ - @assert length(ARGS) > 0 - hook_env = ARGS[1] - deps = join(ARGS[2:end], " ") - - # We prepend @stdlib here so that we can load the package manager even - # though `get_env_patch` limits `JULIA_LOAD_PATH` to just the hook env. - pushfirst!(LOAD_PATH, "@stdlib") - using Pkg - popfirst!(LOAD_PATH) - - # Instantiate the environment shipped with the hook repo. If we have - # additional dependencies we disable precompilation in this step to - # avoid double work. - precompile = isempty(deps) ? "1" : "0" - withenv("JULIA_PKG_PRECOMPILE_AUTO" => precompile) do - Pkg.instantiate() - end - - # Add additional dependencies (with precompilation) - if !isempty(deps) - withenv("JULIA_PKG_PRECOMPILE_AUTO" => "1") do - Pkg.REPLMode.pkgstr("add " * deps) - end - end - """ - cmd_output_b( - 'julia', '--startup-file=no', '-e', julia_code, '--', envdir, - *additional_dependencies, - cwd=prefix.prefix_dir, - ) diff --git a/pre_commit/languages/lua.py b/pre_commit/languages/lua.py index 15ac1a2e..12d06614 100644 --- a/pre_commit/languages/lua.py +++ b/pre_commit/languages/lua.py @@ -3,8 +3,8 @@ from __future__ import annotations import contextlib import os import sys -from collections.abc import Generator -from collections.abc import Sequence +from typing import Generator +from typing import Sequence from pre_commit import lang_base from pre_commit.envcontext import envcontext @@ -44,7 +44,7 @@ def get_env_patch(d: str) -> PatchesT: # pragma: win32 no cover @contextlib.contextmanager # pragma: win32 no cover -def in_env(prefix: Prefix, version: str) -> Generator[None]: +def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir)): yield diff --git a/pre_commit/languages/node.py b/pre_commit/languages/node.py index af7dc6f8..66d61363 100644 --- a/pre_commit/languages/node.py +++ b/pre_commit/languages/node.py @@ -4,8 +4,8 @@ import contextlib import functools import os import sys -from collections.abc import Generator -from collections.abc import Sequence +from typing import Generator +from typing import Sequence import pre_commit.constants as C from pre_commit import lang_base @@ -59,7 +59,7 @@ def get_env_patch(venv: str) -> PatchesT: @contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None]: +def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir)): yield @@ -93,7 +93,7 @@ def install_environment( # install as if we installed from git 
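A change repeated throughout this patch is spelling `Generator[None]` as `Generator[None, None, None]`: with older typing stubs the annotation needs all three parameters (yield, send, and return types), whereas newer stubs default the last two. A small runnable sketch of the fully spelled form as used by these `in_env` context managers:

```python
from __future__ import annotations

import contextlib
from typing import Generator


@contextlib.contextmanager
def demo(envdir: str) -> Generator[None, None, None]:
    # yield type, send type, return type -- only the first is used here,
    # which is why newer annotations can abbreviate this as Generator[None].
    print(f'entering {envdir}')
    try:
        yield
    finally:
        print(f'leaving {envdir}')


with demo('py_env-python3.8'):
    pass
```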
local_install_cmd = ( - 'npm', 'install', '--include=dev', '--include=prod', + 'npm', 'install', '--dev', '--prod', '--ignore-prepublish', '--no-progress', '--no-save', ) lang_base.setup_cmd(prefix, local_install_cmd) diff --git a/pre_commit/languages/perl.py b/pre_commit/languages/perl.py index a07d442a..2a7f1629 100644 --- a/pre_commit/languages/perl.py +++ b/pre_commit/languages/perl.py @@ -3,8 +3,8 @@ from __future__ import annotations import contextlib import os import shlex -from collections.abc import Generator -from collections.abc import Sequence +from typing import Generator +from typing import Sequence from pre_commit import lang_base from pre_commit.envcontext import envcontext @@ -33,7 +33,7 @@ def get_env_patch(venv: str) -> PatchesT: @contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None]: +def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir)): yield diff --git a/pre_commit/languages/pygrep.py b/pre_commit/languages/pygrep.py index 72a9345f..ec55560b 100644 --- a/pre_commit/languages/pygrep.py +++ b/pre_commit/languages/pygrep.py @@ -3,9 +3,9 @@ from __future__ import annotations import argparse import re import sys -from collections.abc import Sequence -from re import Pattern from typing import NamedTuple +from typing import Pattern +from typing import Sequence from pre_commit import lang_base from pre_commit import output diff --git a/pre_commit/languages/python.py b/pre_commit/languages/python.py index 88ececce..3ef34360 100644 --- a/pre_commit/languages/python.py +++ b/pre_commit/languages/python.py @@ -4,8 +4,8 @@ import contextlib import functools import os import sys -from collections.abc import Generator -from collections.abc import Sequence +from typing import Generator +from typing import Sequence import pre_commit.constants as C from pre_commit import lang_base @@ -24,7 +24,7 @@ ENVIRONMENT_DIR = 'py_env' run_hook = lang_base.basic_run_hook -@functools.cache +@functools.lru_cache(maxsize=None) def _version_info(exe: str) -> str: prog = 'import sys;print(".".join(str(p) for p in sys.version_info))' try: @@ -65,7 +65,7 @@ def _find_by_py_launcher( version: str, ) -> str | None: # pragma: no cover (windows only) if version.startswith('python'): - num = version.removeprefix('python') + num = version[len('python'):] cmd = ('py', f'-{num}', '-c', 'import sys; print(sys.executable)') env = dict(os.environ, PYTHONIOENCODING='UTF-8') try: @@ -75,13 +75,6 @@ def _find_by_py_launcher( return None -def _impl_exe_name() -> str: - if sys.implementation.name == 'cpython': # pragma: cpython cover - return 'python' - else: # pragma: cpython no cover - return sys.implementation.name # pypy mostly - - def _find_by_sys_executable() -> str | None: def _norm(path: str) -> str | None: _, exe = os.path.split(path.lower()) @@ -107,25 +100,18 @@ def _find_by_sys_executable() -> str | None: @functools.lru_cache(maxsize=1) def get_default_version() -> str: # pragma: no cover (platform dependent) - v_major = f'{sys.version_info[0]}' - v_minor = f'{sys.version_info[0]}.{sys.version_info[1]}' + # First attempt from `sys.executable` (or the realpath) + exe = _find_by_sys_executable() + if exe: + return exe - # attempt the likely implementation exe - for potential in (v_minor, v_major): - exe = f'{_impl_exe_name()}{potential}' - if find_executable(exe): - return exe + # Next try the `pythonX.X` executable + exe = 
f'python{sys.version_info[0]}.{sys.version_info[1]}' + if find_executable(exe): + return exe - # next try `sys.executable` (or the realpath) - maybe_exe = _find_by_sys_executable() - if maybe_exe: - return maybe_exe - - # maybe on windows we can find it via py launcher? - if sys.platform == 'win32': # pragma: win32 cover - exe = f'python{v_minor}' - if _find_by_py_launcher(exe): - return exe + if _find_by_py_launcher(exe): + return exe # We tried! return C.DEFAULT @@ -138,7 +124,7 @@ def _sys_executable_matches(version: str) -> bool: return False try: - info = tuple(int(p) for p in version.removeprefix('python').split('.')) + info = tuple(int(p) for p in version[len('python'):].split('.')) except ValueError: return False @@ -166,7 +152,7 @@ def norm_version(version: str) -> str | None: @contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None]: +def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir)): yield diff --git a/pre_commit/languages/r.py b/pre_commit/languages/r.py index f70d2fdc..138a26e1 100644 --- a/pre_commit/languages/r.py +++ b/pre_commit/languages/r.py @@ -4,120 +4,21 @@ import contextlib import os import shlex import shutil -import tempfile -import textwrap -from collections.abc import Generator -from collections.abc import Sequence +from typing import Generator +from typing import Sequence from pre_commit import lang_base from pre_commit.envcontext import envcontext from pre_commit.envcontext import PatchesT from pre_commit.envcontext import UNSET from pre_commit.prefix import Prefix -from pre_commit.util import cmd_output +from pre_commit.util import cmd_output_b from pre_commit.util import win_exe ENVIRONMENT_DIR = 'renv' +RSCRIPT_OPTS = ('--no-save', '--no-restore', '--no-site-file', '--no-environ') get_default_version = lang_base.basic_get_default_version - -_RENV_ACTIVATED_OPTS = ( - '--no-save', '--no-restore', '--no-site-file', '--no-environ', -) - - -def _execute_r( - code: str, *, - prefix: Prefix, version: str, args: Sequence[str] = (), cwd: str, - cli_opts: Sequence[str], -) -> str: - with in_env(prefix, version), _r_code_in_tempfile(code) as f: - _, out, _ = cmd_output( - _rscript_exec(), *cli_opts, f, *args, cwd=cwd, - ) - return out.rstrip('\n') - - -def _execute_r_in_renv( - code: str, *, - prefix: Prefix, version: str, args: Sequence[str] = (), cwd: str, -) -> str: - return _execute_r( - code=code, prefix=prefix, version=version, args=args, cwd=cwd, - cli_opts=_RENV_ACTIVATED_OPTS, - ) - - -def _execute_vanilla_r( - code: str, *, - prefix: Prefix, version: str, args: Sequence[str] = (), cwd: str, -) -> str: - return _execute_r( - code=code, prefix=prefix, version=version, args=args, cwd=cwd, - cli_opts=('--vanilla',), - ) - - -def _read_installed_version(envdir: str, prefix: Prefix, version: str) -> str: - return _execute_r_in_renv( - 'cat(renv::settings$r.version())', - prefix=prefix, version=version, - cwd=envdir, - ) - - -def _read_executable_version(envdir: str, prefix: Prefix, version: str) -> str: - return _execute_r_in_renv( - 'cat(as.character(getRversion()))', - prefix=prefix, version=version, - cwd=envdir, - ) - - -def _write_current_r_version( - envdir: str, prefix: Prefix, version: str, -) -> None: - _execute_r_in_renv( - 'renv::settings$r.version(as.character(getRversion()))', - prefix=prefix, version=version, - cwd=envdir, - ) - - -def health_check(prefix: Prefix, version: str) -> str | None: 
- envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) - - r_version_installation = _read_installed_version( - envdir=envdir, prefix=prefix, version=version, - ) - r_version_current_executable = _read_executable_version( - envdir=envdir, prefix=prefix, version=version, - ) - if r_version_installation in {'NULL', ''}: - return ( - f'Hooks were installed with an unknown R version. R version for ' - f'hook repo now set to {r_version_current_executable}' - ) - elif r_version_installation != r_version_current_executable: - return ( - f'Hooks were installed for R version {r_version_installation}, ' - f'but current R executable has version ' - f'{r_version_current_executable}' - ) - - return None - - -@contextlib.contextmanager -def _r_code_in_tempfile(code: str) -> Generator[str]: - """ - To avoid quoting and escaping issues, avoid `Rscript [options] -e {expr}` - but use `Rscript [options] path/to/file_with_expr.R` - """ - with tempfile.TemporaryDirectory() as tmpdir: - fname = os.path.join(tmpdir, 'script.R') - with open(fname, 'w') as f: - f.write(_inline_r_setup(textwrap.dedent(code))) - yield fname +health_check = lang_base.basic_health_check def get_env_patch(venv: str) -> PatchesT: @@ -128,7 +29,7 @@ def get_env_patch(venv: str) -> PatchesT: @contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None]: +def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir)): yield @@ -184,7 +85,7 @@ def _cmd_from_hook( _entry_validate(cmd) cmd_part = _prefix_if_file_entry(cmd, prefix, is_local=is_local) - return (cmd[0], *_RENV_ACTIVATED_OPTS, *cmd_part, *args) + return (cmd[0], *RSCRIPT_OPTS, *cmd_part, *args) def install_environment( @@ -192,8 +93,6 @@ def install_environment( version: str, additional_dependencies: Sequence[str], ) -> None: - lang_base.assert_version_default('r', version) - env_dir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) os.makedirs(env_dir, exist_ok=True) shutil.copy(prefix.path('renv.lock'), env_dir) @@ -227,19 +126,21 @@ def install_environment( renv::install(prefix_dir) }} """ - _execute_vanilla_r( - r_code_inst_environment, - prefix=prefix, version=version, cwd=env_dir, - ) - _write_current_r_version(envdir=env_dir, prefix=prefix, version=version) + cmd_output_b( + _rscript_exec(), '--vanilla', '-e', + _inline_r_setup(r_code_inst_environment), + cwd=env_dir, + ) if additional_dependencies: r_code_inst_add = 'renv::install(commandArgs(trailingOnly = TRUE))' - _execute_r_in_renv( - code=r_code_inst_add, prefix=prefix, version=version, - args=additional_dependencies, - cwd=env_dir, - ) + with in_env(prefix, version): + cmd_output_b( + _rscript_exec(), *RSCRIPT_OPTS, '-e', + _inline_r_setup(r_code_inst_add), + *additional_dependencies, + cwd=env_dir, + ) def _inline_r_setup(code: str) -> str: @@ -247,16 +148,11 @@ def _inline_r_setup(code: str) -> str: Some behaviour of R cannot be configured via env variables, but can only be configured via R options once R has started. These are set here. 
""" - with_option = [ - textwrap.dedent("""\ - options( - install.packages.compile.from.source = "never", - pkgType = "binary" - ) - """), - code, - ] - return '\n'.join(with_option) + with_option = f"""\ + options(install.packages.compile.from.source = "never", pkgType = "binary") + {code} + """ + return with_option def run_hook( diff --git a/pre_commit/languages/ruby.py b/pre_commit/languages/ruby.py index f32fea3f..76631f25 100644 --- a/pre_commit/languages/ruby.py +++ b/pre_commit/languages/ruby.py @@ -6,9 +6,9 @@ import importlib.resources import os.path import shutil import tarfile -from collections.abc import Generator -from collections.abc import Sequence +from typing import Generator from typing import IO +from typing import Sequence import pre_commit.constants as C from pre_commit import lang_base @@ -25,8 +25,7 @@ run_hook = lang_base.basic_run_hook def _resource_bytesio(filename: str) -> IO[bytes]: - files = importlib.resources.files('pre_commit.resources') - return files.joinpath(filename).open('rb') + return importlib.resources.open_binary('pre_commit.resources', filename) @functools.lru_cache(maxsize=1) @@ -73,7 +72,7 @@ def get_env_patch( @contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None]: +def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir, version)): yield @@ -115,8 +114,6 @@ def _install_ruby( def install_environment( prefix: Prefix, version: str, additional_dependencies: Sequence[str], ) -> None: - envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) - if version != 'system': # pragma: win32 no cover _install_rbenv(prefix, version) with in_env(prefix, version): @@ -138,8 +135,6 @@ def install_environment( 'gem', 'install', '--no-document', '--no-format-executable', '--no-user-install', - '--install-dir', os.path.join(envdir, 'gems'), - '--bindir', os.path.join(envdir, 'gems', 'bin'), *prefix.star('.gem'), *additional_dependencies, ), ) diff --git a/pre_commit/languages/rust.py b/pre_commit/languages/rust.py index fd77a9d2..7eec0e7d 100644 --- a/pre_commit/languages/rust.py +++ b/pre_commit/languages/rust.py @@ -7,8 +7,8 @@ import shutil import sys import tempfile import urllib.request -from collections.abc import Generator -from collections.abc import Sequence +from typing import Generator +from typing import Sequence import pre_commit.constants as C from pre_commit import lang_base @@ -34,7 +34,7 @@ def get_default_version() -> str: # Just detecting the executable does not suffice, because if rustup is # installed but no toolchain is available, then `cargo` exists but # cannot be used without installing a toolchain first. 
- if cmd_output_b('cargo', '--version', check=False, cwd='/')[0] == 0: + if cmd_output_b('cargo', '--version', check=False)[0] == 0: return 'system' else: return C.DEFAULT @@ -61,7 +61,7 @@ def get_env_patch(target_dir: str, version: str) -> PatchesT: @contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None]: +def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir, version)): yield @@ -134,7 +134,7 @@ def install_environment( packages_to_install: set[tuple[str, ...]] = {('--path', '.')} for cli_dep in cli_deps: - cli_dep = cli_dep.removeprefix('cli:') + cli_dep = cli_dep[len('cli:'):] package, _, crate_version = cli_dep.partition(':') if crate_version != '': packages_to_install.add((package, '--version', crate_version)) diff --git a/pre_commit/languages/unsupported_script.py b/pre_commit/languages/script.py similarity index 95% rename from pre_commit/languages/unsupported_script.py rename to pre_commit/languages/script.py index 1eaa1e27..89a3ab2d 100644 --- a/pre_commit/languages/unsupported_script.py +++ b/pre_commit/languages/script.py @@ -1,6 +1,6 @@ from __future__ import annotations -from collections.abc import Sequence +from typing import Sequence from pre_commit import lang_base from pre_commit.prefix import Prefix diff --git a/pre_commit/languages/swift.py b/pre_commit/languages/swift.py index 08a9c39a..f16bb045 100644 --- a/pre_commit/languages/swift.py +++ b/pre_commit/languages/swift.py @@ -2,8 +2,8 @@ from __future__ import annotations import contextlib import os -from collections.abc import Generator -from collections.abc import Sequence +from typing import Generator +from typing import Sequence from pre_commit import lang_base from pre_commit.envcontext import envcontext @@ -27,7 +27,7 @@ def get_env_patch(venv: str) -> PatchesT: # pragma: win32 no cover @contextlib.contextmanager # pragma: win32 no cover -def in_env(prefix: Prefix, version: str) -> Generator[None]: +def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir)): yield diff --git a/pre_commit/languages/unsupported.py b/pre_commit/languages/system.py similarity index 100% rename from pre_commit/languages/unsupported.py rename to pre_commit/languages/system.py diff --git a/pre_commit/logging_handler.py b/pre_commit/logging_handler.py index 74772bee..1b68fc7d 100644 --- a/pre_commit/logging_handler.py +++ b/pre_commit/logging_handler.py @@ -2,7 +2,7 @@ from __future__ import annotations import contextlib import logging -from collections.abc import Generator +from typing import Generator from pre_commit import color from pre_commit import output @@ -32,7 +32,7 @@ class LoggingHandler(logging.Handler): @contextlib.contextmanager -def logging_handler(use_color: bool) -> Generator[None]: +def logging_handler(use_color: bool) -> Generator[None, None, None]: handler = LoggingHandler(use_color) logger.addHandler(handler) logger.setLevel(logging.INFO) diff --git a/pre_commit/main.py b/pre_commit/main.py index 0c3eefda..9615c5e1 100644 --- a/pre_commit/main.py +++ b/pre_commit/main.py @@ -4,13 +4,12 @@ import argparse import logging import os import sys -from collections.abc import Sequence +from typing import Sequence import pre_commit.constants as C from pre_commit import clientlib from pre_commit import git from pre_commit.color import 
add_color_option -from pre_commit.commands import hazmat from pre_commit.commands.autoupdate import autoupdate from pre_commit.commands.clean import clean from pre_commit.commands.gc import gc @@ -38,11 +37,8 @@ logger = logging.getLogger('pre_commit') # pyvenv os.environ.pop('__PYVENV_LAUNCHER__', None) -# https://github.com/getsentry/snuba/pull/5388 -os.environ.pop('PYTHONEXECUTABLE', None) - COMMANDS_NO_GIT = { - 'clean', 'gc', 'hazmat', 'init-templatedir', 'sample-config', + 'clean', 'gc', 'init-templatedir', 'sample-config', 'validate-config', 'validate-manifest', } @@ -63,10 +59,10 @@ def _add_hook_type_option(parser: argparse.ArgumentParser) -> None: def _add_run_options(parser: argparse.ArgumentParser) -> None: parser.add_argument('hook', nargs='?', help='A single hook-id to run') - parser.add_argument('--verbose', '-v', action='store_true') + parser.add_argument('--verbose', '-v', action='store_true', default=False) mutex_group = parser.add_mutually_exclusive_group(required=False) mutex_group.add_argument( - '--all-files', '-a', action='store_true', + '--all-files', '-a', action='store_true', default=False, help='Run on all the files in the repo.', ) mutex_group.add_argument( @@ -77,10 +73,6 @@ def _add_run_options(parser: argparse.ArgumentParser) -> None: '--show-diff-on-failure', action='store_true', help='When hooks fail, run `git diff` directly afterward.', ) - parser.add_argument( - '--fail-fast', action='store_true', - help='Stop after the first failing hook.', - ) parser.add_argument( '--hook-stage', choices=clientlib.STAGES, @@ -234,23 +226,14 @@ def main(argv: Sequence[str] | None = None) -> int: help='Store "frozen" hashes in `rev` instead of tag names', ) autoupdate_parser.add_argument( - '--repo', dest='repos', action='append', metavar='REPO', default=[], + '--repo', dest='repos', action='append', metavar='REPO', help='Only update this repository -- may be specified multiple times.', ) - autoupdate_parser.add_argument( - '-j', '--jobs', type=int, default=1, - help='Number of threads to use. (default %(default)s).', - ) _add_cmd('clean', help='Clean out pre-commit files.') _add_cmd('gc', help='Clean unused cached repos.') - hazmat_parser = _add_cmd( - 'hazmat', help='Composable tools for rare use in hook `entry`.', - ) - hazmat.add_parsers(hazmat_parser) - init_templatedir_parser = _add_cmd( 'init-templatedir', help=( @@ -285,7 +268,7 @@ def main(argv: Sequence[str] | None = None) -> int: ) _add_hook_type_option(install_parser) install_parser.add_argument( - '--allow-missing-config', action='store_true', + '--allow-missing-config', action='store_true', default=False, help=( 'Whether to allow a missing `pre-commit` configuration file ' 'or exit with a failure code.' 
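Several hunks in `main.py` add an explicit `default=False` to `action='store_true'` options; argparse already defaults `store_true` to `False`, so both spellings behave identically. A quick self-contained check:

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--verbose', '-v', action='store_true')
parser.add_argument('--all-files', '-a', action='store_true', default=False)

args = parser.parse_args([])
print(args.verbose, args.all_files)   # False False

args = parser.parse_args(['-v', '-a'])
print(args.verbose, args.all_files)   # True True
```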
@@ -385,18 +368,15 @@ def main(argv: Sequence[str] | None = None) -> int: if args.command == 'autoupdate': return autoupdate( - args.config, + args.config, store, tags_only=not args.bleeding_edge, freeze=args.freeze, repos=args.repos, - jobs=args.jobs, ) elif args.command == 'clean': return clean(store) elif args.command == 'gc': return gc(store) - elif args.command == 'hazmat': - return hazmat.impl(args) elif args.command == 'hook-impl': return hook_impl( store, diff --git a/pre_commit/meta_hooks/check_hooks_apply.py b/pre_commit/meta_hooks/check_hooks_apply.py index 84c142b4..b05a7050 100644 --- a/pre_commit/meta_hooks/check_hooks_apply.py +++ b/pre_commit/meta_hooks/check_hooks_apply.py @@ -1,7 +1,7 @@ from __future__ import annotations import argparse -from collections.abc import Sequence +from typing import Sequence import pre_commit.constants as C from pre_commit import git @@ -21,7 +21,7 @@ def check_all_hooks_match_files(config_file: str) -> int: for hook in all_hooks(config, Store()): if hook.always_run or hook.language == 'fail': continue - elif not any(classifier.filenames_for_hook(hook)): + elif not classifier.filenames_for_hook(hook): print(f'{hook.id} does not apply to this repository') retv = 1 diff --git a/pre_commit/meta_hooks/check_useless_excludes.py b/pre_commit/meta_hooks/check_useless_excludes.py index 664251a4..0a8249b8 100644 --- a/pre_commit/meta_hooks/check_useless_excludes.py +++ b/pre_commit/meta_hooks/check_useless_excludes.py @@ -2,8 +2,7 @@ from __future__ import annotations import argparse import re -from collections.abc import Iterable -from collections.abc import Sequence +from typing import Sequence from cfgv import apply_defaults @@ -15,7 +14,7 @@ from pre_commit.commands.run import Classifier def exclude_matches_any( - filenames: Iterable[str], + filenames: Sequence[str], include: str, exclude: str, ) -> bool: @@ -51,12 +50,11 @@ def check_useless_excludes(config_file: str) -> int: # Not actually a manifest dict, but this more accurately reflects # the defaults applied during runtime hook = apply_defaults(hook, MANIFEST_HOOK_DICT) - names = classifier.by_types( - classifier.filenames, - hook['types'], - hook['types_or'], - hook['exclude_types'], - ) + names = classifier.filenames + types = hook['types'] + types_or = hook['types_or'] + exclude_types = hook['exclude_types'] + names = classifier.by_types(names, types, types_or, exclude_types) include, exclude = hook['files'], hook['exclude'] if not exclude_matches_any(names, include, exclude): print( diff --git a/pre_commit/meta_hooks/identity.py b/pre_commit/meta_hooks/identity.py index 3e20bbc6..72ee440b 100644 --- a/pre_commit/meta_hooks/identity.py +++ b/pre_commit/meta_hooks/identity.py @@ -1,7 +1,7 @@ from __future__ import annotations import sys -from collections.abc import Sequence +from typing import Sequence from pre_commit import output diff --git a/pre_commit/parse_shebang.py b/pre_commit/parse_shebang.py index 043a9b5d..3ee04e8d 100644 --- a/pre_commit/parse_shebang.py +++ b/pre_commit/parse_shebang.py @@ -1,7 +1,7 @@ from __future__ import annotations import os.path -from collections.abc import Mapping +from typing import Mapping from typing import NoReturn from identify.identify import parse_shebang_from_file diff --git a/pre_commit/repository.py b/pre_commit/repository.py index a9461ab6..040f238f 100644 --- a/pre_commit/repository.py +++ b/pre_commit/repository.py @@ -3,14 +3,16 @@ from __future__ import annotations import json import logging import os -from collections.abc import Sequence 
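Regarding the `check_hooks_apply` change above (`not any(classifier.filenames_for_hook(hook))` versus `not classifier.filenames_for_hook(hook)`): the `any()` form is the one that works when the helper returns a lazy iterator, because a generator object is always truthy even when it would yield nothing. A sketch with stand-in helpers (not pre-commit's implementation):

```python
def filenames_for_hook_lazy():
    # stand-in for a generator of matching filenames
    yield from ('a.py', 'b.py')


def filenames_for_hook_list():
    # stand-in for the older, list-returning variant
    return ['a.py', 'b.py']


print(bool(filenames_for_hook_lazy()))   # True -- a generator is always truthy
print(any(filenames_for_hook_lazy()))    # True -- it actually yielded something
print(any(x for x in ()))                # False -- an empty lazy iterable is detected
print(not filenames_for_hook_list())     # False -- plain truthiness is fine for a list
```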
+import shlex from typing import Any +from typing import Sequence import pre_commit.constants as C from pre_commit.all_languages import languages from pre_commit.clientlib import load_manifest from pre_commit.clientlib import LOCAL from pre_commit.clientlib import META +from pre_commit.clientlib import parse_version from pre_commit.hook import Hook from pre_commit.lang_base import environment_dir from pre_commit.prefix import Prefix @@ -67,6 +69,14 @@ def _hook_install(hook: Hook) -> None: logger.info('Once installed this environment will be reused.') logger.info('This may take a few minutes...') + if hook.language == 'python_venv': + logger.warning( + f'`repo: {hook.src}` uses deprecated `language: python_venv`. ' + f'This is an alias for `language: python`. ' + f'Often `pre-commit autoupdate --repo {shlex.quote(hook.src)}` ' + f'will fix this.', + ) + lang = languages[hook.language] assert lang.ENVIRONMENT_DIR is not None @@ -114,6 +124,15 @@ def _hook( for dct in rest: ret.update(dct) + version = ret['minimum_pre_commit_version'] + if parse_version(version) > parse_version(C.VERSION): + logger.error( + f'The hook `{ret["id"]}` requires pre-commit version {version} ' + f'but version {C.VERSION} is installed. ' + f'Perhaps run `pip install --upgrade pre-commit`.', + ) + exit(1) + lang = ret['language'] if ret['language_version'] == C.DEFAULT: ret['language_version'] = root_config['default_language_version'][lang] diff --git a/pre_commit/resources/empty_template_pubspec.yaml b/pre_commit/resources/empty_template_pubspec.yaml index 8306aeb6..3be6ffe3 100644 --- a/pre_commit/resources/empty_template_pubspec.yaml +++ b/pre_commit/resources/empty_template_pubspec.yaml @@ -1,4 +1,4 @@ name: pre_commit_empty_pubspec environment: - sdk: '>=2.12.0' + sdk: '>=2.10.0' executables: {} diff --git a/pre_commit/resources/empty_template_setup.py b/pre_commit/resources/empty_template_setup.py index e8b1ff02..ef05eef8 100644 --- a/pre_commit/resources/empty_template_setup.py +++ b/pre_commit/resources/empty_template_setup.py @@ -1,4 +1,4 @@ from setuptools import setup -setup(name='pre-commit-placeholder-package', version='0.0.0', py_modules=[]) +setup(name='pre-commit-placeholder-package', version='0.0.0') diff --git a/pre_commit/resources/rbenv.tar.gz b/pre_commit/resources/rbenv.tar.gz index b5df0874..da2514e7 100644 Binary files a/pre_commit/resources/rbenv.tar.gz and b/pre_commit/resources/rbenv.tar.gz differ diff --git a/pre_commit/resources/ruby-build.tar.gz b/pre_commit/resources/ruby-build.tar.gz index 5c82c906..b6eacf59 100644 Binary files a/pre_commit/resources/ruby-build.tar.gz and b/pre_commit/resources/ruby-build.tar.gz differ diff --git a/pre_commit/resources/ruby-download.tar.gz b/pre_commit/resources/ruby-download.tar.gz index f7cb0b42..92502a77 100644 Binary files a/pre_commit/resources/ruby-download.tar.gz and b/pre_commit/resources/ruby-download.tar.gz differ diff --git a/pre_commit/staged_files_only.py b/pre_commit/staged_files_only.py index 99ea0979..88123565 100644 --- a/pre_commit/staged_files_only.py +++ b/pre_commit/staged_files_only.py @@ -4,7 +4,7 @@ import contextlib import logging import os.path import time -from collections.abc import Generator +from typing import Generator from pre_commit import git from pre_commit.errors import FatalError @@ -33,7 +33,7 @@ def _git_apply(patch: str) -> None: @contextlib.contextmanager -def _intent_to_add_cleared() -> Generator[None]: +def _intent_to_add_cleared() -> Generator[None, None, None]: intent_to_add = git.intent_to_add_files() if 
intent_to_add: logger.warning('Unstaged intent-to-add files detected.') @@ -48,7 +48,7 @@ def _intent_to_add_cleared() -> Generator[None]: @contextlib.contextmanager -def _unstaged_changes_cleared(patch_dir: str) -> Generator[None]: +def _unstaged_changes_cleared(patch_dir: str) -> Generator[None, None, None]: tree = cmd_output('git', 'write-tree')[1].strip() diff_cmd = ( 'git', 'diff-index', '--ignore-submodules', '--binary', @@ -59,11 +59,6 @@ def _unstaged_changes_cleared(patch_dir: str) -> Generator[None]: # There weren't any staged files so we don't need to do anything # special yield - elif retcode == 1 and not diff_stdout.strip(): - # due to behaviour (probably a bug?) in git with crlf endings and - # autocrlf set to either `true` or `input` sometimes git will refuse - # to show a crlf-only diff to us :( - yield elif retcode == 1 and diff_stdout.strip(): patch_filename = f'patch{int(time.time())}-{os.getpid()}' patch_filename = os.path.join(patch_dir, patch_filename) @@ -105,7 +100,7 @@ def _unstaged_changes_cleared(patch_dir: str) -> Generator[None]: @contextlib.contextmanager -def staged_files_only(patch_dir: str) -> Generator[None]: +def staged_files_only(patch_dir: str) -> Generator[None, None, None]: """Clear any unstaged changes from the git working directory inside this context. """ diff --git a/pre_commit/store.py b/pre_commit/store.py index dc90c051..487e3e79 100644 --- a/pre_commit/store.py +++ b/pre_commit/store.py @@ -5,18 +5,18 @@ import logging import os.path import sqlite3 import tempfile -from collections.abc import Callable -from collections.abc import Generator -from collections.abc import Sequence +from typing import Callable +from typing import Generator +from typing import Sequence import pre_commit.constants as C -from pre_commit import clientlib from pre_commit import file_lock from pre_commit import git from pre_commit.util import CalledProcessError from pre_commit.util import clean_path_on_failure from pre_commit.util import cmd_output_b from pre_commit.util import resource_text +from pre_commit.util import rmtree logger = logging.getLogger('pre_commit') @@ -95,13 +95,13 @@ class Store: ' PRIMARY KEY (repo, ref)' ');', ) - self._create_configs_table(db) + self._create_config_table(db) # Atomic file move os.replace(tmpfile, self.db_path) @contextlib.contextmanager - def exclusive_lock(self) -> Generator[None]: + def exclusive_lock(self) -> Generator[None, None, None]: def blocked_cb() -> None: # pragma: no cover (tests are in-process) logger.info('Locking pre-commit directory') @@ -112,7 +112,7 @@ class Store: def connect( self, db_path: str | None = None, - ) -> Generator[sqlite3.Connection]: + ) -> Generator[sqlite3.Connection, None, None]: db_path = db_path or self.db_path # sqlite doesn't close its fd with its contextmanager >.< # contextlib.closing fixes this. 
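The sqlite comment above is worth a standalone illustration: a `sqlite3.Connection` used as a context manager only commits or rolls back a transaction, it does not close the connection, hence the `contextlib.closing` wrapper. A minimal sketch of the same pattern against a throwaway database (names here are illustrative, not the `Store` implementation):

```python
from __future__ import annotations

import contextlib
import os.path
import sqlite3
import tempfile
from typing import Generator


@contextlib.contextmanager
def connect(db_path: str) -> Generator[sqlite3.Connection, None, None]:
    # sqlite3.Connection.__exit__ only handles the transaction;
    # contextlib.closing is what actually closes the file descriptor.
    with contextlib.closing(sqlite3.connect(db_path)) as db:
        with db:  # transaction scope: commit on success, rollback on error
            yield db


with tempfile.TemporaryDirectory() as tmp:
    path = os.path.join(tmp, 'db.db')
    with connect(path) as db:
        db.execute('CREATE TABLE IF NOT EXISTS configs (path TEXT NOT NULL)')
        db.execute('INSERT INTO configs VALUES (?)', ('.pre-commit-config.yaml',))
    # the connection is committed and closed before the tempdir is removed
```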
@@ -136,7 +136,6 @@ class Store: deps: Sequence[str], make_strategy: Callable[[str], None], ) -> str: - original_repo = repo repo = self.db_repo_name(repo, deps) def _get_result() -> str | None: @@ -169,9 +168,6 @@ class Store: 'INSERT INTO repos (repo, ref, path) VALUES (?, ?, ?)', [repo, ref, directory], ) - - clientlib.warn_for_stages_on_repo_init(original_repo, directory) - return directory def _complete_clone(self, ref: str, git_cmd: Callable[..., None]) -> None: @@ -214,7 +210,7 @@ class Store: 'local', C.LOCAL_REPO_VERSION, deps, _make_local_repo, ) - def _create_configs_table(self, db: sqlite3.Connection) -> None: + def _create_config_table(self, db: sqlite3.Connection) -> None: db.executescript( 'CREATE TABLE IF NOT EXISTS configs (' ' path TEXT NOT NULL,' @@ -231,5 +227,28 @@ class Store: return with self.connect() as db: # TODO: eventually remove this and only create in _create - self._create_configs_table(db) + self._create_config_table(db) db.execute('INSERT OR IGNORE INTO configs VALUES (?)', (path,)) + + def select_all_configs(self) -> list[str]: + with self.connect() as db: + self._create_config_table(db) + rows = db.execute('SELECT path FROM configs').fetchall() + return [path for path, in rows] + + def delete_configs(self, configs: list[str]) -> None: + with self.connect() as db: + rows = [(path,) for path in configs] + db.executemany('DELETE FROM configs WHERE path = ?', rows) + + def select_all_repos(self) -> list[tuple[str, str, str]]: + with self.connect() as db: + return db.execute('SELECT repo, ref, path from repos').fetchall() + + def delete_repo(self, db_repo_name: str, ref: str, path: str) -> None: + with self.connect() as db: + db.execute( + 'DELETE FROM repos WHERE repo = ? and ref = ?', + (db_repo_name, ref), + ) + rmtree(path) diff --git a/pre_commit/util.py b/pre_commit/util.py index 19b1880b..4f8e8357 100644 --- a/pre_commit/util.py +++ b/pre_commit/util.py @@ -8,10 +8,10 @@ import shutil import stat import subprocess import sys -from collections.abc import Callable -from collections.abc import Generator from types import TracebackType from typing import Any +from typing import Callable +from typing import Generator from pre_commit import parse_shebang @@ -25,7 +25,7 @@ def force_bytes(exc: Any) -> bytes: @contextlib.contextmanager -def clean_path_on_failure(path: str) -> Generator[None]: +def clean_path_on_failure(path: str) -> Generator[None, None, None]: """Cleans up the directory on an exceptional failure.""" try: yield @@ -36,8 +36,7 @@ def clean_path_on_failure(path: str) -> Generator[None]: def resource_text(filename: str) -> str: - files = importlib.resources.files('pre_commit.resources') - return files.joinpath(filename).read_text() + return importlib.resources.read_text('pre_commit.resources', filename) def make_executable(filename: str) -> None: @@ -202,37 +201,24 @@ else: # pragma: no cover cmd_output_p = cmd_output_b -def _handle_readonly( - func: Callable[[str], object], - path: str, - exc: BaseException, -) -> None: - if ( - func in (os.rmdir, os.remove, os.unlink) and - isinstance(exc, OSError) and - exc.errno in {errno.EACCES, errno.EPERM} - ): - for p in (path, os.path.dirname(path)): - os.chmod(p, os.stat(p).st_mode | stat.S_IWUSR) - func(path) - else: - raise - - -if sys.version_info < (3, 12): # pragma: <3.12 cover - def _handle_readonly_old( - func: Callable[[str], object], - path: str, - excinfo: tuple[type[BaseException], BaseException, TracebackType], +def rmtree(path: str) -> None: + """On windows, rmtree fails for readonly 
dirs.""" + def handle_remove_readonly( + func: Callable[..., Any], + path: str, + exc: tuple[type[OSError], OSError, TracebackType], ) -> None: - return _handle_readonly(func, path, excinfo[1]) - - def rmtree(path: str) -> None: - shutil.rmtree(path, ignore_errors=False, onerror=_handle_readonly_old) -else: # pragma: >=3.12 cover - def rmtree(path: str) -> None: - """On windows, rmtree fails for readonly dirs.""" - shutil.rmtree(path, ignore_errors=False, onexc=_handle_readonly) + excvalue = exc[1] + if ( + func in (os.rmdir, os.remove, os.unlink) and + excvalue.errno in {errno.EACCES, errno.EPERM} + ): + for p in (path, os.path.dirname(path)): + os.chmod(p, os.stat(p).st_mode | stat.S_IWUSR) + func(path) + else: + raise + shutil.rmtree(path, ignore_errors=False, onerror=handle_remove_readonly) def win_exe(s: str) -> str: diff --git a/pre_commit/xargs.py b/pre_commit/xargs.py index 7c98d167..e3af90ef 100644 --- a/pre_commit/xargs.py +++ b/pre_commit/xargs.py @@ -3,16 +3,15 @@ from __future__ import annotations import concurrent.futures import contextlib import math -import multiprocessing import os import subprocess import sys -from collections.abc import Callable -from collections.abc import Generator -from collections.abc import Iterable -from collections.abc import MutableMapping -from collections.abc import Sequence from typing import Any +from typing import Callable +from typing import Generator +from typing import Iterable +from typing import MutableMapping +from typing import Sequence from typing import TypeVar from pre_commit import parse_shebang @@ -23,21 +22,6 @@ TArg = TypeVar('TArg') TRet = TypeVar('TRet') -def cpu_count() -> int: - try: - # On systems that support it, this will return a more accurate count of - # usable CPUs for the current process, which will take into account - # cgroup limits - return len(os.sched_getaffinity(0)) - except AttributeError: - pass - - try: - return multiprocessing.cpu_count() - except NotImplementedError: - return 1 - - def _environ_size(_env: MutableMapping[str, str] | None = None) -> int: environ = _env if _env is not None else getattr(os, 'environb', os.environ) size = 8 * len(environ) # number of pointers in `envp` @@ -120,6 +104,7 @@ def partition( @contextlib.contextmanager def _thread_mapper(maxsize: int) -> Generator[ Callable[[Callable[[TArg], TRet], Iterable[TArg]], Iterable[TRet]], + None, None, ]: if maxsize == 1: yield map @@ -177,8 +162,7 @@ def xargs( results = thread_map(run_cmd_partition, partitions) for proc_retcode, proc_out, _ in results: - if abs(proc_retcode) > abs(retcode): - retcode = proc_retcode + retcode = max(retcode, proc_retcode) stdout += proc_out return retcode, stdout diff --git a/pre_commit/yaml.py b/pre_commit/yaml.py index a5bbbc99..bdf4ec47 100644 --- a/pre_commit/yaml.py +++ b/pre_commit/yaml.py @@ -6,7 +6,6 @@ from typing import Any import yaml Loader = getattr(yaml, 'CSafeLoader', yaml.SafeLoader) -yaml_compose = functools.partial(yaml.compose, Loader=Loader) yaml_load = functools.partial(yaml.load, Loader=Loader) Dumper = getattr(yaml, 'CSafeDumper', yaml.SafeDumper) diff --git a/pre_commit/yaml_rewrite.py b/pre_commit/yaml_rewrite.py deleted file mode 100644 index 8d0e8fdb..00000000 --- a/pre_commit/yaml_rewrite.py +++ /dev/null @@ -1,52 +0,0 @@ -from __future__ import annotations - -from collections.abc import Generator -from collections.abc import Iterable -from typing import NamedTuple -from typing import Protocol - -from yaml.nodes import MappingNode -from yaml.nodes import Node -from yaml.nodes 
import ScalarNode -from yaml.nodes import SequenceNode - - -class _Matcher(Protocol): - def match(self, n: Node) -> Generator[Node]: ... - - -class MappingKey(NamedTuple): - k: str - - def match(self, n: Node) -> Generator[Node]: - if isinstance(n, MappingNode): - for k, _ in n.value: - if k.value == self.k: - yield k - - -class MappingValue(NamedTuple): - k: str - - def match(self, n: Node) -> Generator[Node]: - if isinstance(n, MappingNode): - for k, v in n.value: - if k.value == self.k: - yield v - - -class SequenceItem(NamedTuple): - def match(self, n: Node) -> Generator[Node]: - if isinstance(n, SequenceNode): - yield from n.value - - -def _match(gen: Iterable[Node], m: _Matcher) -> Iterable[Node]: - return (n for src in gen for n in m.match(src)) - - -def match(n: Node, matcher: tuple[_Matcher, ...]) -> Generator[ScalarNode]: - gen: Iterable[Node] = (n,) - for m in matcher: - gen = _match(gen, m) - return (n for n in gen if isinstance(n, ScalarNode)) diff --git a/setup.cfg b/setup.cfg index a95ee447..89e8e4ad 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = pre_commit -version = 4.5.1 +version = 3.2.2 description = A framework for managing and maintaining multi-language pre-commit hooks. long_description = file: README.md long_description_content_type = text/markdown @@ -8,8 +8,9 @@ url = https://github.com/pre-commit/pre-commit author = Anthony Sottile author_email = asottile@umich.edu license = MIT -license_files = LICENSE +license_file = LICENSE classifiers = + License :: OSI Approved :: MIT License Programming Language :: Python :: 3 Programming Language :: Python :: 3 :: Only Programming Language :: Python :: Implementation :: CPython @@ -23,7 +24,7 @@ install_requires = nodeenv>=0.11.1 pyyaml>=5.1 virtualenv>=20.10.0 -python_requires = >=3.10 +python_requires = >=3.8 [options.packages.find] exclude = @@ -52,7 +53,6 @@ check_untyped_defs = true disallow_any_generics = true disallow_incomplete_defs = true disallow_untyped_defs = true -enable_error_code = deprecated warn_redundant_casts = true warn_unused_ignores = true diff --git a/testing/get-dart.sh b/testing/get-dart.sh index b4545e71..998b9d98 100755 --- a/testing/get-dart.sh +++ b/testing/get-dart.sh @@ -1,7 +1,7 @@ #!/usr/bin/env bash set -euo pipefail -VERSION=2.19.6 +VERSION=2.13.4 if [ "$OSTYPE" = msys ]; then URL="https://storage.googleapis.com/dart-archive/channels/stable/release/${VERSION}/sdk/dartsdk-windows-x64-release.zip" diff --git a/testing/get-swift.sh b/testing/get-swift.sh new file mode 100755 index 00000000..dfe09391 --- /dev/null +++ b/testing/get-swift.sh @@ -0,0 +1,29 @@ +#!/usr/bin/env bash +# This is a script used in CI to install swift +set -euo pipefail + +. /etc/lsb-release +if [ "$DISTRIB_CODENAME" = "jammy" ]; then + SWIFT_URL='https://download.swift.org/swift-5.7.1-release/ubuntu2204/swift-5.7.1-RELEASE/swift-5.7.1-RELEASE-ubuntu22.04.tar.gz' + SWIFT_HASH='7f60291f5088d3e77b0c2364beaabd29616ee7b37260b7b06bdbeb891a7fe161' +else + echo "unknown dist: ${DISTRIB_CODENAME}" 1>&2 + exit 1 +fi + +check() { + echo "$SWIFT_HASH $TGZ" | sha256sum --check +} + +TGZ="$HOME/.swift/swift.tar.gz" +mkdir -p "$(dirname "$TGZ")" +if ! 
check >& /dev/null; then + rm -f "$TGZ" + curl --location --silent --output "$TGZ" "$SWIFT_URL" + check +fi + +mkdir -p /tmp/swift +tar -xf "$TGZ" --strip 1 --directory /tmp/swift + +echo '/tmp/swift/usr/bin' >> "$GITHUB_PATH" diff --git a/testing/language_helpers.py b/testing/language_helpers.py index 05c94ebc..ead8dae2 100644 --- a/testing/language_helpers.py +++ b/testing/language_helpers.py @@ -1,7 +1,7 @@ from __future__ import annotations import os -from collections.abc import Sequence +from typing import Sequence from pre_commit.lang_base import Language from pre_commit.prefix import Prefix diff --git a/testing/languages b/testing/languages index f4804c7e..5e8fc9e4 100755 --- a/testing/languages +++ b/testing/languages @@ -16,15 +16,6 @@ EXCLUDED = frozenset(( )) -def _always_run() -> frozenset[str]: - ret = ['.github/workflows/languages.yaml', 'testing/languages'] - ret.extend( - os.path.join('pre_commit/resources', fname) - for fname in os.listdir('pre_commit/resources') - ) - return frozenset(ret) - - def _lang_files(lang: str) -> frozenset[str]: prog = f'''\ import json @@ -56,14 +47,10 @@ def main() -> int: if fname.endswith('.py') and fname != '__init__.py' ] - triggers_all = _always_run() - for fname in triggers_all: - assert os.path.exists(fname), fname - if not args.all: with concurrent.futures.ThreadPoolExecutor(os.cpu_count()) as exe: by_lang = { - lang: files | triggers_all + lang: files for lang, files in zip(langs, exe.map(_lang_files, langs)) } diff --git a/testing/make-archives b/testing/make-archives index 10f40a3a..cec9a9ff 100755 --- a/testing/make-archives +++ b/testing/make-archives @@ -8,7 +8,7 @@ import shutil import subprocess import tarfile import tempfile -from collections.abc import Sequence +from typing import Sequence # This is a script for generating the tarred resources for git repo @@ -16,8 +16,8 @@ from collections.abc import Sequence REPOS = ( - ('rbenv', 'https://github.com/rbenv/rbenv', '10e96bfc'), - ('ruby-build', 'https://github.com/rbenv/ruby-build', '447468b1'), + ('rbenv', 'https://github.com/rbenv/rbenv', '38e1fbb'), + ('ruby-build', 'https://github.com/rbenv/ruby-build', '9d92a69'), ( 'ruby-download', 'https://github.com/garnieretienne/rvm-download', @@ -57,7 +57,8 @@ def make_archive(name: str, repo: str, ref: str, destdir: str) -> str: arcs.sort() with gzip.GzipFile(output_path, 'wb', mtime=0) as gzipf: - with tarfile.open(fileobj=gzipf, mode='w') as tf: + # https://github.com/python/typeshed/issues/5491 + with tarfile.open(fileobj=gzipf, mode='w') as tf: # type: ignore for arcname, abspath in arcs: tf.add( abspath, diff --git a/testing/resources/python3_hooks_repo/.pre-commit-hooks.yaml b/testing/resources/python3_hooks_repo/.pre-commit-hooks.yaml new file mode 100644 index 00000000..2c237009 --- /dev/null +++ b/testing/resources/python3_hooks_repo/.pre-commit-hooks.yaml @@ -0,0 +1,6 @@ +- id: python3-hook + name: Python 3 Hook + entry: python3-hook + language: python + language_version: python3 + files: \.py$ diff --git a/testing/resources/python3_hooks_repo/py3_hook.py b/testing/resources/python3_hooks_repo/py3_hook.py new file mode 100644 index 00000000..8c9cda4c --- /dev/null +++ b/testing/resources/python3_hooks_repo/py3_hook.py @@ -0,0 +1,8 @@ +import sys + + +def main(): + print(sys.version_info[0]) + print(repr(sys.argv[1:])) + print('Hello World') + return 0 diff --git a/testing/resources/python3_hooks_repo/setup.py b/testing/resources/python3_hooks_repo/setup.py new file mode 100644 index 00000000..9125dc1d --- /dev/null +++ 
b/testing/resources/python3_hooks_repo/setup.py @@ -0,0 +1,8 @@ +from setuptools import setup + +setup( + name='python3_hook', + version='0.0.0', + py_modules=['py3_hook'], + entry_points={'console_scripts': ['python3-hook = py3_hook:main']}, +) diff --git a/testing/resources/system_hook_with_spaces_repo/.pre-commit-hooks.yaml b/testing/resources/system_hook_with_spaces_repo/.pre-commit-hooks.yaml new file mode 100644 index 00000000..b2c347c1 --- /dev/null +++ b/testing/resources/system_hook_with_spaces_repo/.pre-commit-hooks.yaml @@ -0,0 +1,5 @@ +- id: system-hook-with-spaces + name: System hook with spaces + entry: bash -c 'echo "Hello World"' + language: system + files: \.sh$ diff --git a/testing/util.py b/testing/util.py index 1646ccd2..08d52cbc 100644 --- a/testing/util.py +++ b/testing/util.py @@ -40,7 +40,6 @@ def run_opts( color=False, verbose=False, hook=None, - fail_fast=False, remote_branch='', local_branch='', from_ref='', @@ -66,7 +65,6 @@ def run_opts( color=color, verbose=verbose, hook=hook, - fail_fast=fail_fast, remote_branch=remote_branch, local_branch=local_branch, from_ref=from_ref, diff --git a/testing/zipapp/Dockerfile b/testing/zipapp/Dockerfile index ea967e38..7c74c1b2 100644 --- a/testing/zipapp/Dockerfile +++ b/testing/zipapp/Dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:jammy +FROM ubuntu:focal RUN : \ && apt-get update \ && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ @@ -11,4 +11,4 @@ RUN : \ ENV LANG=C.UTF-8 PATH=/venv/bin:$PATH RUN : \ && python3 -mvenv /venv \ - && pip install --no-cache-dir pip distlib no-manylinux --upgrade + && pip install --no-cache-dir pip setuptools wheel no-manylinux --upgrade diff --git a/testing/zipapp/make b/testing/zipapp/make index 43bb4373..37b5c355 100755 --- a/testing/zipapp/make +++ b/testing/zipapp/make @@ -4,6 +4,7 @@ from __future__ import annotations import argparse import base64 import hashlib +import importlib.resources import io import os.path import shutil @@ -41,17 +42,10 @@ def _add_shim(dest: str) -> None: with zipfile.ZipFile(bio, 'w') as zipf: zipf.write(shim, arcname='__main__.py') - with tempfile.TemporaryDirectory() as tmpdir: - _exit_if_retv( - 'podman', 'run', '--rm', '--volume', f'{tmpdir}:/out:rw', IMG, - 'cp', '/venv/lib/python3.10/site-packages/distlib/t32.exe', '/out', - ) - - with open(os.path.join(dest, 'python.exe'), 'wb') as f: - with open(os.path.join(tmpdir, 't32.exe'), 'rb') as t32: - f.write(t32.read()) - f.write(b'#!py.exe -3\n') - f.write(bio.getvalue()) + with open(os.path.join(dest, 'python.exe'), 'wb') as f: + f.write(importlib.resources.read_binary('distlib', 't32.exe')) + f.write(b'#!py.exe -3\n') + f.write(bio.getvalue()) def _write_cache_key(version: str, wheeldir: str, dest: str) -> None: @@ -107,6 +101,9 @@ def main() -> int: shebang = '/usr/bin/env python3' zipapp.create_archive(tmpdir, filename, interpreter=shebang) + with open(f'{filename}.sha256sum', 'w') as f: + subprocess.check_call(('sha256sum', filename), stdout=f) + return 0 diff --git a/tests/all_languages_test.py b/tests/all_languages_test.py new file mode 100644 index 00000000..98c91215 --- /dev/null +++ b/tests/all_languages_test.py @@ -0,0 +1,7 @@ +from __future__ import annotations + +from pre_commit.all_languages import languages + + +def test_python_venv_is_an_alias_to_python(): + assert languages['python_venv'] is languages['python'] diff --git a/tests/clientlib_test.py b/tests/clientlib_test.py index 2c42b80c..568b2e97 100644 --- a/tests/clientlib_test.py +++ b/tests/clientlib_test.py @@ 
-12,8 +12,6 @@ from pre_commit.clientlib import CONFIG_HOOK_DICT from pre_commit.clientlib import CONFIG_REPO_DICT from pre_commit.clientlib import CONFIG_SCHEMA from pre_commit.clientlib import DEFAULT_LANGUAGE_VERSION -from pre_commit.clientlib import InvalidManifestError -from pre_commit.clientlib import load_manifest from pre_commit.clientlib import MANIFEST_HOOK_DICT from pre_commit.clientlib import MANIFEST_SCHEMA from pre_commit.clientlib import META_HOOK_DICT @@ -42,51 +40,56 @@ def test_check_type_tag_success(): @pytest.mark.parametrize( - 'cfg', - ( - { - 'repos': [{ - 'repo': 'git@github.com:pre-commit/pre-commit-hooks', - 'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37', - 'hooks': [{'id': 'pyflakes', 'files': '\\.py$'}], - }], - }, - { - 'repos': [{ - 'repo': 'git@github.com:pre-commit/pre-commit-hooks', - 'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37', - 'hooks': [ - { - 'id': 'pyflakes', - 'files': '\\.py$', - 'args': ['foo', 'bar', 'baz'], - }, - ], - }], - }, + ('config_obj', 'expected'), ( + ( + { + 'repos': [{ + 'repo': 'git@github.com:pre-commit/pre-commit-hooks', + 'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37', + 'hooks': [{'id': 'pyflakes', 'files': '\\.py$'}], + }], + }, + True, + ), + ( + { + 'repos': [{ + 'repo': 'git@github.com:pre-commit/pre-commit-hooks', + 'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37', + 'hooks': [ + { + 'id': 'pyflakes', + 'files': '\\.py$', + 'args': ['foo', 'bar', 'baz'], + }, + ], + }], + }, + True, + ), + ( + { + 'repos': [{ + 'repo': 'git@github.com:pre-commit/pre-commit-hooks', + 'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37', + 'hooks': [ + { + 'id': 'pyflakes', + 'files': '\\.py$', + # Exclude pattern must be a string + 'exclude': 0, + 'args': ['foo', 'bar', 'baz'], + }, + ], + }], + }, + False, + ), ), ) -def test_config_valid(cfg): - assert is_valid_according_to_schema(cfg, CONFIG_SCHEMA) - - -def test_invalid_config_wrong_type(): - cfg = { - 'repos': [{ - 'repo': 'git@github.com:pre-commit/pre-commit-hooks', - 'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37', - 'hooks': [ - { - 'id': 'pyflakes', - 'files': '\\.py$', - # Exclude pattern must be a string - 'exclude': 0, - 'args': ['foo', 'bar', 'baz'], - }, - ], - }], - } - assert not is_valid_according_to_schema(cfg, CONFIG_SCHEMA) +def test_config_valid(config_obj, expected): + ret = is_valid_according_to_schema(config_obj, CONFIG_SCHEMA) + assert ret is expected def test_local_hooks_with_rev_fails(): @@ -195,13 +198,14 @@ def test_warn_mutable_rev_conditional(): ), ) def test_sensible_regex_validators_dont_pass_none(validator_cls): - validator = validator_cls('files', cfgv.check_string) + key = 'files' with pytest.raises(cfgv.ValidationError) as excinfo: - validator.check({'files': None}) + validator = validator_cls(key, cfgv.check_string) + validator.check({key: None}) assert str(excinfo.value) == ( '\n' - '==> At key: files' + f'==> At key: {key}' '\n' '=====> Expected string got NoneType' ) @@ -258,24 +262,6 @@ def test_validate_optional_sensible_regex_at_local_hook(caplog): ] -def test_validate_optional_sensible_regex_at_meta_hook(caplog): - config_obj = { - 'repo': 'meta', - 'hooks': [{'id': 'identity', 'files': 'dir/*.py'}], - } - - cfgv.validate(config_obj, CONFIG_REPO_DICT) - - assert caplog.record_tuples == [ - ( - 'pre_commit', - logging.WARNING, - "The 'files' field in hook 'identity' is a regex, not a glob " - "-- matching '/*' probably isn't what you want here", - ), - ] - - @pytest.mark.parametrize( ('regex', 'warning'), ( @@ -311,128 +297,47 @@ def 
test_validate_optional_sensible_regex_at_top_level(caplog, regex, warning): assert caplog.record_tuples == [('pre_commit', logging.WARNING, warning)] -def test_invalid_stages_error(): - cfg = {'repos': [sample_local_config()]} - cfg['repos'][0]['hooks'][0]['stages'] = ['invalid'] - - with pytest.raises(cfgv.ValidationError) as excinfo: - cfgv.validate(cfg, CONFIG_SCHEMA) - - assert str(excinfo.value) == ( - '\n' - '==> At Config()\n' - '==> At key: repos\n' - "==> At Repository(repo='local')\n" - '==> At key: hooks\n' - "==> At Hook(id='do_not_commit')\n" - # this line was missing due to the custom validator - '==> At key: stages\n' - '==> At index 0\n' - "=====> Expected one of commit-msg, manual, post-checkout, post-commit, post-merge, post-rewrite, pre-commit, pre-merge-commit, pre-push, pre-rebase, prepare-commit-msg but got: 'invalid'" # noqa: E501 - ) - - -def test_warning_for_deprecated_stages(caplog): - config_obj = sample_local_config() - config_obj['hooks'][0]['stages'] = ['commit', 'push'] - - cfgv.validate(config_obj, CONFIG_REPO_DICT) - - assert caplog.record_tuples == [ - ( - 'pre_commit', - logging.WARNING, - 'hook id `do_not_commit` uses deprecated stage names ' - '(commit, push) which will be removed in a future version. ' - 'run: `pre-commit migrate-config` to automatically fix this.', - ), - ] - - -def test_no_warning_for_non_deprecated_stages(caplog): - config_obj = sample_local_config() - config_obj['hooks'][0]['stages'] = ['pre-commit', 'pre-push'] - - cfgv.validate(config_obj, CONFIG_REPO_DICT) - - assert caplog.record_tuples == [] - - -def test_warning_for_deprecated_default_stages(caplog): - cfg = {'default_stages': ['commit', 'push'], 'repos': []} - - cfgv.validate(cfg, CONFIG_SCHEMA) - - assert caplog.record_tuples == [ - ( - 'pre_commit', - logging.WARNING, - 'top-level `default_stages` uses deprecated stage names ' - '(commit, push) which will be removed in a future version. 
' - 'run: `pre-commit migrate-config` to automatically fix this.', - ), - ] - - -def test_no_warning_for_non_deprecated_default_stages(caplog): - cfg = {'default_stages': ['pre-commit', 'pre-push'], 'repos': []} - - cfgv.validate(cfg, CONFIG_SCHEMA) - - assert caplog.record_tuples == [] - - -def test_unsupported_language_migration(): - cfg = {'repos': [sample_local_config(), sample_local_config()]} - cfg['repos'][0]['hooks'][0]['language'] = 'system' - cfg['repos'][1]['hooks'][0]['language'] = 'script' - - cfgv.validate(cfg, CONFIG_SCHEMA) - ret = cfgv.apply_defaults(cfg, CONFIG_SCHEMA) - - assert ret['repos'][0]['hooks'][0]['language'] == 'unsupported' - assert ret['repos'][1]['hooks'][0]['language'] == 'unsupported_script' - - -def test_unsupported_language_migration_language_required(): - cfg = {'repos': [sample_local_config()]} - del cfg['repos'][0]['hooks'][0]['language'] - - with pytest.raises(cfgv.ValidationError): - cfgv.validate(cfg, CONFIG_SCHEMA) - - @pytest.mark.parametrize( - 'manifest_obj', + ('manifest_obj', 'expected'), ( - [{ - 'id': 'a', - 'name': 'b', - 'entry': 'c', - 'language': 'python', - 'files': r'\.py$', - }], - [{ - 'id': 'a', - 'name': 'b', - 'entry': 'c', - 'language': 'python', - 'language_version': 'python3.4', - 'files': r'\.py$', - }], - # A regression in 0.13.5: always_run and files are permissible - [{ - 'id': 'a', - 'name': 'b', - 'entry': 'c', - 'language': 'python', - 'files': '', - 'always_run': True, - }], + ( + [{ + 'id': 'a', + 'name': 'b', + 'entry': 'c', + 'language': 'python', + 'files': r'\.py$', + }], + True, + ), + ( + [{ + 'id': 'a', + 'name': 'b', + 'entry': 'c', + 'language': 'python', + 'language_version': 'python3.4', + 'files': r'\.py$', + }], + True, + ), + ( + # A regression in 0.13.5: always_run and files are permissible + [{ + 'id': 'a', + 'name': 'b', + 'entry': 'c', + 'language': 'python', + 'files': '', + 'always_run': True, + }], + True, + ), ), ) -def test_valid_manifests(manifest_obj): - assert is_valid_according_to_schema(manifest_obj, MANIFEST_SCHEMA) +def test_valid_manifests(manifest_obj, expected): + ret = is_valid_according_to_schema(manifest_obj, MANIFEST_SCHEMA) + assert ret is expected @pytest.mark.parametrize( @@ -488,39 +393,8 @@ def test_parse_version(): def test_minimum_pre_commit_version_failing(): - cfg = {'repos': [], 'minimum_pre_commit_version': '999'} - with pytest.raises(cfgv.ValidationError) as excinfo: - cfgv.validate(cfg, CONFIG_SCHEMA) - assert str(excinfo.value) == ( - f'\n' - f'==> At Config()\n' - f'==> At key: minimum_pre_commit_version\n' - f'=====> pre-commit version 999 is required but version {C.VERSION} ' - f'is installed. Perhaps run `pip install --upgrade pre-commit`.' - ) - - -def test_minimum_pre_commit_version_failing_in_config(): - cfg = {'repos': [sample_local_config()]} - cfg['repos'][0]['hooks'][0]['minimum_pre_commit_version'] = '999' - with pytest.raises(cfgv.ValidationError) as excinfo: - cfgv.validate(cfg, CONFIG_SCHEMA) - assert str(excinfo.value) == ( - f'\n' - f'==> At Config()\n' - f'==> At key: repos\n' - f"==> At Repository(repo='local')\n" - f'==> At key: hooks\n' - f"==> At Hook(id='do_not_commit')\n" - f'==> At key: minimum_pre_commit_version\n' - f'=====> pre-commit version 999 is required but version {C.VERSION} ' - f'is installed. Perhaps run `pip install --upgrade pre-commit`.' 
- ) - - -def test_minimum_pre_commit_version_failing_before_other_error(): - cfg = {'repos': 5, 'minimum_pre_commit_version': '999'} with pytest.raises(cfgv.ValidationError) as excinfo: + cfg = {'repos': [], 'minimum_pre_commit_version': '999'} cfgv.validate(cfg, CONFIG_SCHEMA) assert str(excinfo.value) == ( f'\n' @@ -590,18 +464,3 @@ def test_config_hook_stages_defaulting(): 'id': 'fake-hook', 'stages': ['commit-msg', 'pre-push', 'pre-commit', 'pre-merge-commit'], } - - -def test_manifest_v5_forward_compat(tmp_path): - manifest = tmp_path.joinpath('.pre-commit-hooks.yaml') - manifest.write_text('hooks: {}') - - with pytest.raises(InvalidManifestError) as excinfo: - load_manifest(manifest) - assert str(excinfo.value) == ( - f'\n' - f'==> File {manifest}\n' - f'=====> \n' - f'=====> pre-commit version 5 is required but version {C.VERSION} ' - f'is installed. Perhaps run `pip install --upgrade pre-commit`.' - ) diff --git a/tests/commands/autoupdate_test.py b/tests/commands/autoupdate_test.py index 71bd0444..4bcb5d82 100644 --- a/tests/commands/autoupdate_test.py +++ b/tests/commands/autoupdate_test.py @@ -67,7 +67,7 @@ def test_rev_info_from_config(): def test_rev_info_update_up_to_date_repo(up_to_date): config = make_config_from_repo(up_to_date) - info = RevInfo.from_config(config)._replace(hook_ids=frozenset(('foo',))) + info = RevInfo.from_config(config) new_info = info.update(tags_only=False, freeze=False) assert info == new_info @@ -139,7 +139,7 @@ def test_rev_info_update_does_not_freeze_if_already_sha(out_of_date): assert new_info.frozen is None -def test_autoupdate_up_to_date_repo(up_to_date, tmpdir): +def test_autoupdate_up_to_date_repo(up_to_date, tmpdir, store): contents = ( f'repos:\n' f'- repo: {up_to_date}\n' @@ -150,11 +150,11 @@ def test_autoupdate_up_to_date_repo(up_to_date, tmpdir): cfg = tmpdir.join(C.CONFIG_FILE) cfg.write(contents) - assert autoupdate(str(cfg), freeze=False, tags_only=False) == 0 + assert autoupdate(str(cfg), store, freeze=False, tags_only=False) == 0 assert cfg.read() == contents -def test_autoupdate_old_revision_broken(tempdir_factory, in_tmpdir): +def test_autoupdate_old_revision_broken(tempdir_factory, in_tmpdir, store): """In $FUTURE_VERSION, hooks.yaml will no longer be supported. This asserts that when that day comes, pre-commit will be able to autoupdate despite not being able to read hooks.yaml in that repository. 
@@ -174,14 +174,14 @@ def test_autoupdate_old_revision_broken(tempdir_factory, in_tmpdir): write_config('.', config) with open(C.CONFIG_FILE) as f: before = f.read() - assert autoupdate(C.CONFIG_FILE, freeze=False, tags_only=False) == 0 + assert autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=False) == 0 with open(C.CONFIG_FILE) as f: after = f.read() assert before != after assert update_rev in after -def test_autoupdate_out_of_date_repo(out_of_date, tmpdir): +def test_autoupdate_out_of_date_repo(out_of_date, tmpdir, store): fmt = ( 'repos:\n' '- repo: {}\n' @@ -192,24 +192,24 @@ def test_autoupdate_out_of_date_repo(out_of_date, tmpdir): cfg = tmpdir.join(C.CONFIG_FILE) cfg.write(fmt.format(out_of_date.path, out_of_date.original_rev)) - assert autoupdate(str(cfg), freeze=False, tags_only=False) == 0 + assert autoupdate(str(cfg), store, freeze=False, tags_only=False) == 0 assert cfg.read() == fmt.format(out_of_date.path, out_of_date.head_rev) -def test_autoupdate_with_core_useBuiltinFSMonitor(out_of_date, tmpdir): +def test_autoupdate_with_core_useBuiltinFSMonitor(out_of_date, tmpdir, store): # force the setting on "globally" for git home = tmpdir.join('fakehome').ensure_dir() home.join('.gitconfig').write('[core]\nuseBuiltinFSMonitor = true\n') with envcontext.envcontext((('HOME', str(home)),)): - test_autoupdate_out_of_date_repo(out_of_date, tmpdir) + test_autoupdate_out_of_date_repo(out_of_date, tmpdir, store) -def test_autoupdate_pure_yaml(out_of_date, tmpdir): +def test_autoupdate_pure_yaml(out_of_date, tmpdir, store): with mock.patch.object(yaml, 'Dumper', yaml.yaml.SafeDumper): - test_autoupdate_out_of_date_repo(out_of_date, tmpdir) + test_autoupdate_out_of_date_repo(out_of_date, tmpdir, store) -def test_autoupdate_only_one_to_update(up_to_date, out_of_date, tmpdir): +def test_autoupdate_only_one_to_update(up_to_date, out_of_date, tmpdir, store): fmt = ( 'repos:\n' '- repo: {}\n' @@ -228,7 +228,7 @@ def test_autoupdate_only_one_to_update(up_to_date, out_of_date, tmpdir): ) cfg.write(before) - assert autoupdate(str(cfg), freeze=False, tags_only=False) == 0 + assert autoupdate(str(cfg), store, freeze=False, tags_only=False) == 0 assert cfg.read() == fmt.format( up_to_date, git.head_rev(up_to_date), out_of_date.path, out_of_date.head_rev, @@ -236,7 +236,7 @@ def test_autoupdate_only_one_to_update(up_to_date, out_of_date, tmpdir): def test_autoupdate_out_of_date_repo_with_correct_repo_name( - out_of_date, in_tmpdir, + out_of_date, in_tmpdir, store, ): stale_config = make_config_from_repo( out_of_date.path, rev=out_of_date.original_rev, check=False, @@ -249,7 +249,7 @@ def test_autoupdate_out_of_date_repo_with_correct_repo_name( before = f.read() repo_name = f'file://{out_of_date.path}' ret = autoupdate( - C.CONFIG_FILE, freeze=False, tags_only=False, + C.CONFIG_FILE, store, freeze=False, tags_only=False, repos=(repo_name,), ) with open(C.CONFIG_FILE) as f: @@ -261,7 +261,7 @@ def test_autoupdate_out_of_date_repo_with_correct_repo_name( def test_autoupdate_out_of_date_repo_with_wrong_repo_name( - out_of_date, in_tmpdir, + out_of_date, in_tmpdir, store, ): config = make_config_from_repo( out_of_date.path, rev=out_of_date.original_rev, check=False, @@ -272,7 +272,7 @@ def test_autoupdate_out_of_date_repo_with_wrong_repo_name( before = f.read() # It will not update it, because the name doesn't match ret = autoupdate( - C.CONFIG_FILE, freeze=False, tags_only=False, + C.CONFIG_FILE, store, freeze=False, tags_only=False, repos=('dne',), ) with open(C.CONFIG_FILE) as f: @@ -281,7 +281,7 
@@ def test_autoupdate_out_of_date_repo_with_wrong_repo_name( assert before == after -def test_does_not_reformat(tmpdir, out_of_date): +def test_does_not_reformat(tmpdir, out_of_date, store): fmt = ( 'repos:\n' '- repo: {}\n' @@ -294,12 +294,12 @@ def test_does_not_reformat(tmpdir, out_of_date): cfg = tmpdir.join(C.CONFIG_FILE) cfg.write(fmt.format(out_of_date.path, out_of_date.original_rev)) - assert autoupdate(str(cfg), freeze=False, tags_only=False) == 0 + assert autoupdate(str(cfg), store, freeze=False, tags_only=False) == 0 expected = fmt.format(out_of_date.path, out_of_date.head_rev) assert cfg.read() == expected -def test_does_not_change_mixed_endlines_read(up_to_date, tmpdir): +def test_does_not_change_mixed_endlines_read(up_to_date, tmpdir, store): fmt = ( 'repos:\n' '- repo: {}\n' @@ -314,11 +314,11 @@ def test_does_not_change_mixed_endlines_read(up_to_date, tmpdir): expected = fmt.format(up_to_date, git.head_rev(up_to_date)).encode() cfg.write_binary(expected) - assert autoupdate(str(cfg), freeze=False, tags_only=False) == 0 + assert autoupdate(str(cfg), store, freeze=False, tags_only=False) == 0 assert cfg.read_binary() == expected -def test_does_not_change_mixed_endlines_write(tmpdir, out_of_date): +def test_does_not_change_mixed_endlines_write(tmpdir, out_of_date, store): fmt = ( 'repos:\n' '- repo: {}\n' @@ -333,12 +333,12 @@ def test_does_not_change_mixed_endlines_write(tmpdir, out_of_date): fmt.format(out_of_date.path, out_of_date.original_rev).encode(), ) - assert autoupdate(str(cfg), freeze=False, tags_only=False) == 0 + assert autoupdate(str(cfg), store, freeze=False, tags_only=False) == 0 expected = fmt.format(out_of_date.path, out_of_date.head_rev).encode() assert cfg.read_binary() == expected -def test_loses_formatting_when_not_detectable(out_of_date, tmpdir): +def test_loses_formatting_when_not_detectable(out_of_date, store, tmpdir): """A best-effort attempt is made at updating rev without rewriting formatting. When the original formatting cannot be detected, this is abandoned. 
@@ -359,7 +359,7 @@ def test_loses_formatting_when_not_detectable(out_of_date, tmpdir): cfg = tmpdir.join(C.CONFIG_FILE) cfg.write(config) - assert autoupdate(str(cfg), freeze=False, tags_only=False) == 0 + assert autoupdate(str(cfg), store, freeze=False, tags_only=False) == 0 expected = ( f'repos:\n' f'- repo: {out_of_date.path}\n' @@ -370,43 +370,43 @@ def test_loses_formatting_when_not_detectable(out_of_date, tmpdir): assert cfg.read() == expected -def test_autoupdate_tagged_repo(tagged, in_tmpdir): +def test_autoupdate_tagged_repo(tagged, in_tmpdir, store): config = make_config_from_repo(tagged.path, rev=tagged.original_rev) write_config('.', config) - assert autoupdate(C.CONFIG_FILE, freeze=False, tags_only=False) == 0 + assert autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=False) == 0 with open(C.CONFIG_FILE) as f: assert 'v1.2.3' in f.read() -def test_autoupdate_freeze(tagged, in_tmpdir): +def test_autoupdate_freeze(tagged, in_tmpdir, store): config = make_config_from_repo(tagged.path, rev=tagged.original_rev) write_config('.', config) - assert autoupdate(C.CONFIG_FILE, freeze=True, tags_only=False) == 0 + assert autoupdate(C.CONFIG_FILE, store, freeze=True, tags_only=False) == 0 with open(C.CONFIG_FILE) as f: expected = f'rev: {tagged.head_rev} # frozen: v1.2.3' assert expected in f.read() # if we un-freeze it should remove the frozen comment - assert autoupdate(C.CONFIG_FILE, freeze=False, tags_only=False) == 0 + assert autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=False) == 0 with open(C.CONFIG_FILE) as f: assert 'rev: v1.2.3\n' in f.read() -def test_autoupdate_tags_only(tagged, in_tmpdir): +def test_autoupdate_tags_only(tagged, in_tmpdir, store): # add some commits after the tag git_commit(cwd=tagged.path) config = make_config_from_repo(tagged.path, rev=tagged.original_rev) write_config('.', config) - assert autoupdate(C.CONFIG_FILE, freeze=False, tags_only=True) == 0 + assert autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=True) == 0 with open(C.CONFIG_FILE) as f: assert 'v1.2.3' in f.read() -def test_autoupdate_latest_no_config(out_of_date, in_tmpdir): +def test_autoupdate_latest_no_config(out_of_date, in_tmpdir, store): config = make_config_from_repo( out_of_date.path, rev=out_of_date.original_rev, ) @@ -415,12 +415,12 @@ def test_autoupdate_latest_no_config(out_of_date, in_tmpdir): cmd_output('git', 'rm', '-r', ':/', cwd=out_of_date.path) git_commit(cwd=out_of_date.path) - assert autoupdate(C.CONFIG_FILE, freeze=False, tags_only=False) == 1 + assert autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=False) == 1 with open(C.CONFIG_FILE) as f: assert out_of_date.original_rev in f.read() -def test_hook_disppearing_repo_raises(hook_disappearing): +def test_hook_disppearing_repo_raises(hook_disappearing, store): config = make_config_from_repo( hook_disappearing.path, rev=hook_disappearing.original_rev, @@ -428,10 +428,10 @@ def test_hook_disppearing_repo_raises(hook_disappearing): ) info = RevInfo.from_config(config).update(tags_only=False, freeze=False) with pytest.raises(RepositoryCannotBeUpdatedError): - _check_hooks_still_exist_at_rev(config, info) + _check_hooks_still_exist_at_rev(config, info, store) -def test_autoupdate_hook_disappearing_repo(hook_disappearing, tmpdir): +def test_autoupdate_hook_disappearing_repo(hook_disappearing, tmpdir, store): contents = ( f'repos:\n' f'- repo: {hook_disappearing.path}\n' @@ -442,21 +442,21 @@ def test_autoupdate_hook_disappearing_repo(hook_disappearing, tmpdir): cfg = tmpdir.join(C.CONFIG_FILE) 
cfg.write(contents) - assert autoupdate(str(cfg), freeze=False, tags_only=False) == 1 + assert autoupdate(str(cfg), store, freeze=False, tags_only=False) == 1 assert cfg.read() == contents -def test_autoupdate_local_hooks(in_git_dir): +def test_autoupdate_local_hooks(in_git_dir, store): config = sample_local_config() add_config_to_repo('.', config) - assert autoupdate(C.CONFIG_FILE, freeze=False, tags_only=False) == 0 + assert autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=False) == 0 new_config_written = read_config('.') assert len(new_config_written['repos']) == 1 assert new_config_written['repos'][0] == config def test_autoupdate_local_hooks_with_out_of_date_repo( - out_of_date, in_tmpdir, + out_of_date, in_tmpdir, store, ): stale_config = make_config_from_repo( out_of_date.path, rev=out_of_date.original_rev, check=False, @@ -464,13 +464,13 @@ def test_autoupdate_local_hooks_with_out_of_date_repo( local_config = sample_local_config() config = {'repos': [local_config, stale_config]} write_config('.', config) - assert autoupdate(C.CONFIG_FILE, freeze=False, tags_only=False) == 0 + assert autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=False) == 0 new_config_written = read_config('.') assert len(new_config_written['repos']) == 2 assert new_config_written['repos'][0] == local_config -def test_autoupdate_meta_hooks(tmpdir): +def test_autoupdate_meta_hooks(tmpdir, store): cfg = tmpdir.join(C.CONFIG_FILE) cfg.write( 'repos:\n' @@ -478,7 +478,7 @@ def test_autoupdate_meta_hooks(tmpdir): ' hooks:\n' ' - id: check-useless-excludes\n', ) - assert autoupdate(str(cfg), freeze=False, tags_only=True) == 0 + assert autoupdate(str(cfg), store, freeze=False, tags_only=True) == 0 assert cfg.read() == ( 'repos:\n' '- repo: meta\n' @@ -487,7 +487,7 @@ def test_autoupdate_meta_hooks(tmpdir): ) -def test_updates_old_format_to_new_format(tmpdir, capsys): +def test_updates_old_format_to_new_format(tmpdir, capsys, store): cfg = tmpdir.join(C.CONFIG_FILE) cfg.write( '- repo: local\n' @@ -497,7 +497,7 @@ def test_updates_old_format_to_new_format(tmpdir, capsys): ' entry: ./bin/foo.sh\n' ' language: script\n', ) - assert autoupdate(str(cfg), freeze=False, tags_only=True) == 0 + assert autoupdate(str(cfg), store, freeze=False, tags_only=True) == 0 contents = cfg.read() assert contents == ( 'repos:\n' @@ -512,7 +512,7 @@ def test_updates_old_format_to_new_format(tmpdir, capsys): assert out == 'Configuration has been migrated.\n' -def test_maintains_rev_quoting_style(tmpdir, out_of_date): +def test_maintains_rev_quoting_style(tmpdir, out_of_date, store): fmt = ( 'repos:\n' '- repo: {path}\n' @@ -527,6 +527,6 @@ def test_maintains_rev_quoting_style(tmpdir, out_of_date): cfg = tmpdir.join(C.CONFIG_FILE) cfg.write(fmt.format(path=out_of_date.path, rev=out_of_date.original_rev)) - assert autoupdate(str(cfg), freeze=False, tags_only=False) == 0 + assert autoupdate(str(cfg), store, freeze=False, tags_only=False) == 0 expected = fmt.format(path=out_of_date.path, rev=out_of_date.head_rev) assert cfg.read() == expected diff --git a/tests/commands/gc_test.py b/tests/commands/gc_test.py index 992b02f3..c128e939 100644 --- a/tests/commands/gc_test.py +++ b/tests/commands/gc_test.py @@ -19,13 +19,11 @@ from testing.util import git_commit def _repo_count(store): - with store.connect() as db: - return db.execute('SELECT COUNT(1) FROM repos').fetchone()[0] + return len(store.select_all_repos()) def _config_count(store): - with store.connect() as db: - return db.execute('SELECT COUNT(1) FROM configs').fetchone()[0] 
+ return len(store.select_all_configs()) def _remove_config_assert_cleared(store, cap_out): @@ -45,9 +43,8 @@ def test_gc(tempdir_factory, store, in_git_dir, cap_out): store.mark_config_used(C.CONFIG_FILE) # update will clone both the old and new repo, making the old one gc-able - assert not install_hooks(C.CONFIG_FILE, store) - assert not autoupdate(C.CONFIG_FILE, freeze=False, tags_only=False) - assert not install_hooks(C.CONFIG_FILE, store) + install_hooks(C.CONFIG_FILE, store) + assert not autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=False) assert _config_count(store) == 1 assert _repo_count(store) == 2 @@ -155,8 +152,7 @@ def test_invalid_manifest_gcd(tempdir_factory, store, in_git_dir, cap_out): install_hooks(C.CONFIG_FILE, store) # we'll "break" the manifest to simulate an old version clone - with store.connect() as db: - path, = db.execute('SELECT path FROM repos').fetchone() + (_, _, path), = store.select_all_repos() os.remove(os.path.join(path, C.MANIFEST_FILE)) assert _config_count(store) == 1 @@ -165,11 +161,3 @@ def test_invalid_manifest_gcd(tempdir_factory, store, in_git_dir, cap_out): assert _config_count(store) == 1 assert _repo_count(store) == 0 assert cap_out.get().splitlines()[-1] == '1 repo(s) removed.' - - -def test_gc_pre_1_14_roll_forward(store, cap_out): - with store.connect() as db: # simulate pre-1.14.0 - db.executescript('DROP TABLE configs') - - assert not gc(store) - assert cap_out.get() == '0 repo(s) removed.\n' diff --git a/tests/commands/hazmat_test.py b/tests/commands/hazmat_test.py deleted file mode 100644 index df957e36..00000000 --- a/tests/commands/hazmat_test.py +++ /dev/null @@ -1,99 +0,0 @@ -from __future__ import annotations - -import sys - -import pytest - -from pre_commit.commands.hazmat import _cmd_filenames -from pre_commit.commands.hazmat import main -from testing.util import cwd - - -def test_cmd_filenames_no_dash_dash(): - with pytest.raises(SystemExit) as excinfo: - _cmd_filenames(('no', 'dashdash', 'here')) - msg, = excinfo.value.args - assert msg == 'hazmat entry must end with `--`' - - -def test_cmd_filenames_no_filenames(): - cmd, filenames = _cmd_filenames(('hello', 'world', '--')) - assert cmd == ('hello', 'world') - assert filenames == () - - -def test_cmd_filenames_some_filenames(): - cmd, filenames = _cmd_filenames(('hello', 'world', '--', 'f1', 'f2')) - assert cmd == ('hello', 'world') - assert filenames == ('f1', 'f2') - - -def test_cmd_filenames_multiple_dashdash(): - cmd, filenames = _cmd_filenames(('hello', '--', 'arg', '--', 'f1', 'f2')) - assert cmd == ('hello', '--', 'arg') - assert filenames == ('f1', 'f2') - - -def test_cd_unexpected_filename(): - with pytest.raises(SystemExit) as excinfo: - main(('cd', 'subdir', 'cmd', '--', 'subdir/1', 'not-subdir/2')) - msg, = excinfo.value.args - assert msg == "unexpected file without prefix='subdir/': not-subdir/2" - - -def _norm(out): - return out.replace('\r\n', '\n') - - -def test_cd(tmp_path, capfd): - subdir = tmp_path.joinpath('subdir') - subdir.mkdir() - subdir.joinpath('a').write_text('a') - subdir.joinpath('b').write_text('b') - - with cwd(tmp_path): - ret = main(( - 'cd', 'subdir', - sys.executable, '-c', - 'import os; print(os.getcwd());' - 'import sys; [print(open(f).read()) for f in sys.argv[1:]]', - '--', - 'subdir/a', 'subdir/b', - )) - - assert ret == 0 - out, err = capfd.readouterr() - assert _norm(out) == f'{subdir}\na\nb\n' - assert err == '' - - -def test_ignore_exit_code(capfd): - ret = main(( - 'ignore-exit-code', sys.executable, '-c', 'raise 
SystemExit("bye")', - )) - assert ret == 0 - out, err = capfd.readouterr() - assert out == '' - assert _norm(err) == 'bye\n' - - -def test_n1(capfd): - ret = main(( - 'n1', sys.executable, '-c', 'import sys; print(sys.argv[1:])', - '--', - 'foo', 'bar', 'baz', - )) - assert ret == 0 - out, err = capfd.readouterr() - assert _norm(out) == "['foo']\n['bar']\n['baz']\n" - assert err == '' - - -def test_n1_some_error_code(): - ret = main(( - 'n1', sys.executable, '-c', - 'import sys; raise SystemExit(sys.argv[1] == "error")', - '--', - 'ok', 'error', 'ok', - )) - assert ret == 1 diff --git a/tests/commands/install_uninstall_test.py b/tests/commands/install_uninstall_test.py index 9eb0e741..8b0d3ece 100644 --- a/tests/commands/install_uninstall_test.py +++ b/tests/commands/install_uninstall_test.py @@ -349,9 +349,8 @@ def test_install_existing_hooks_no_overwrite(tempdir_factory, store): # We should run both the legacy and pre-commit hooks ret, output = _get_commit_output(tempdir_factory) assert ret == 0 - legacy = 'legacy hook\n' - assert output.startswith(legacy) - NORMAL_PRE_COMMIT_RUN.assert_matches(output.removeprefix(legacy)) + assert output.startswith('legacy hook\n') + NORMAL_PRE_COMMIT_RUN.assert_matches(output[len('legacy hook\n'):]) def test_legacy_overwriting_legacy_hook(tempdir_factory, store): @@ -376,9 +375,8 @@ def test_install_existing_hook_no_overwrite_idempotent(tempdir_factory, store): # We should run both the legacy and pre-commit hooks ret, output = _get_commit_output(tempdir_factory) assert ret == 0 - legacy = 'legacy hook\n' - assert output.startswith(legacy) - NORMAL_PRE_COMMIT_RUN.assert_matches(output.removeprefix(legacy)) + assert output.startswith('legacy hook\n') + NORMAL_PRE_COMMIT_RUN.assert_matches(output[len('legacy hook\n'):]) def test_install_with_existing_non_utf8_script(tmpdir, store): diff --git a/tests/commands/migrate_config_test.py b/tests/commands/migrate_config_test.py index a517d2f4..ba184636 100644 --- a/tests/commands/migrate_config_test.py +++ b/tests/commands/migrate_config_test.py @@ -1,26 +1,10 @@ from __future__ import annotations -from unittest import mock - import pytest -import yaml import pre_commit.constants as C from pre_commit.clientlib import InvalidConfigError from pre_commit.commands.migrate_config import migrate_config -from pre_commit.yaml import yaml_compose - - -@pytest.fixture(autouse=True, params=['c', 'pure']) -def switch_pyyaml_impl(request): - if request.param == 'c': - yield - else: - with mock.patch.dict( - yaml_compose.keywords, - {'Loader': yaml.SafeLoader}, - ): - yield def test_migrate_config_normal_format(tmpdir, capsys): @@ -150,27 +134,6 @@ def test_migrate_config_sha_to_rev(tmpdir): ) -def test_migrate_config_sha_to_rev_json(tmp_path): - contents = """\ -{"repos": [{ - "repo": "https://github.com/pre-commit/pre-commit-hooks", - "sha": "v1.2.0", - "hooks": [] -}]} -""" - expected = """\ -{"repos": [{ - "repo": "https://github.com/pre-commit/pre-commit-hooks", - "rev": "v1.2.0", - "hooks": [] -}]} -""" - cfg = tmp_path.joinpath('cfg.yaml') - cfg.write_text(contents) - assert not migrate_config(str(cfg)) - assert cfg.read_text() == expected - - def test_migrate_config_language_python_venv(tmp_path): src = '''\ repos: @@ -204,73 +167,6 @@ repos: assert cfg.read_text() == expected -def test_migrate_config_quoted_python_venv(tmp_path): - src = '''\ -repos: -- repo: local - hooks: - - id: example - name: example - entry: example - language: "python_venv" -''' - expected = '''\ -repos: -- repo: local - hooks: - - id: example 
- name: example - entry: example - language: "python" -''' - cfg = tmp_path.joinpath('cfg.yaml') - cfg.write_text(src) - assert migrate_config(str(cfg)) == 0 - assert cfg.read_text() == expected - - -def test_migrate_config_default_stages(tmp_path): - src = '''\ -default_stages: [commit, push, merge-commit, commit-msg] -repos: [] -''' - expected = '''\ -default_stages: [pre-commit, pre-push, pre-merge-commit, commit-msg] -repos: [] -''' - cfg = tmp_path.joinpath('cfg.yaml') - cfg.write_text(src) - assert migrate_config(str(cfg)) == 0 - assert cfg.read_text() == expected - - -def test_migrate_config_hook_stages(tmp_path): - src = '''\ -repos: -- repo: local - hooks: - - id: example - name: example - entry: example - language: system - stages: ["commit", "push", "merge-commit", "commit-msg"] -''' - expected = '''\ -repos: -- repo: local - hooks: - - id: example - name: example - entry: example - language: system - stages: ["pre-commit", "pre-push", "pre-merge-commit", "commit-msg"] -''' - cfg = tmp_path.joinpath('cfg.yaml') - cfg.write_text(src) - assert migrate_config(str(cfg)) == 0 - assert cfg.read_text() == expected - - def test_migrate_config_invalid_yaml(tmpdir): contents = '[' cfg = tmpdir.join(C.CONFIG_FILE) diff --git a/tests/commands/run_test.py b/tests/commands/run_test.py index e4af1e16..dd15b94c 100644 --- a/tests/commands/run_test.py +++ b/tests/commands/run_test.py @@ -4,7 +4,7 @@ import os.path import shlex import sys import time -from collections.abc import MutableMapping +from typing import MutableMapping from unittest import mock import pytest @@ -293,7 +293,7 @@ def test_verbose_duration(cap_out, store, in_git_dir, t1, t2, expected): write_config('.', {'repo': 'meta', 'hooks': [{'id': 'identity'}]}) cmd_output('git', 'add', '.') opts = run_opts(verbose=True) - with mock.patch.object(time, 'monotonic', side_effect=(t1, t2)): + with mock.patch.object(time, 'time', side_effect=(t1, t2)): ret, printed = _do_run(cap_out, store, str(in_git_dir), opts) assert ret == 0 assert expected in printed @@ -1088,35 +1088,6 @@ def test_fail_fast_per_hook(cap_out, store, repo_with_failing_hook): assert printed.count(b'Failing hook') == 1 -def test_fail_fast_not_prev_failures(cap_out, store, repo_with_failing_hook): - with modify_config() as config: - config['repos'].append({ - 'repo': 'meta', - 'hooks': [ - {'id': 'identity', 'fail_fast': True}, - {'id': 'identity', 'name': 'run me!'}, - ], - }) - stage_a_file() - - ret, printed = _do_run(cap_out, store, repo_with_failing_hook, run_opts()) - # should still run the last hook since the `fail_fast` one didn't fail - assert printed.count(b'run me!') == 1 - - -def test_fail_fast_run_arg(cap_out, store, repo_with_failing_hook): - with modify_config() as config: - # More than one hook to demonstrate early exit - config['repos'][0]['hooks'] *= 2 - stage_a_file() - - ret, printed = _do_run( - cap_out, store, repo_with_failing_hook, run_opts(fail_fast=True), - ) - # it should have only run one hook due to the CLI flag - assert printed.count(b'Failing hook') == 1 - - def test_classifier_removes_dne(): classifier = Classifier(('this_file_does_not_exist',)) assert classifier.filenames == [] @@ -1156,8 +1127,8 @@ def test_classifier_empty_types_or(tmpdir): types_or=[], exclude_types=[], ) - assert tuple(for_symlink) == ('foo',) - assert tuple(for_file) == ('bar',) + assert for_symlink == ['foo'] + assert for_file == ['bar'] @pytest.fixture @@ -1171,33 +1142,33 @@ def some_filenames(): def test_include_exclude_base_case(some_filenames): ret = 
filter_by_include_exclude(some_filenames, '', '^$') - assert tuple(ret) == ( + assert ret == [ '.pre-commit-hooks.yaml', 'pre_commit/git.py', 'pre_commit/main.py', - ) + ] def test_matches_broken_symlink(tmpdir): with tmpdir.as_cwd(): os.symlink('does-not-exist', 'link') ret = filter_by_include_exclude({'link'}, '', '^$') - assert tuple(ret) == ('link',) + assert ret == ['link'] def test_include_exclude_total_match(some_filenames): ret = filter_by_include_exclude(some_filenames, r'^.*\.py$', '^$') - assert tuple(ret) == ('pre_commit/git.py', 'pre_commit/main.py') + assert ret == ['pre_commit/git.py', 'pre_commit/main.py'] def test_include_exclude_does_search_instead_of_match(some_filenames): ret = filter_by_include_exclude(some_filenames, r'\.yaml$', '^$') - assert tuple(ret) == ('.pre-commit-hooks.yaml',) + assert ret == ['.pre-commit-hooks.yaml'] def test_include_exclude_exclude_removes_files(some_filenames): ret = filter_by_include_exclude(some_filenames, '', r'\.py$') - assert tuple(ret) == ('.pre-commit-hooks.yaml',) + assert ret == ['.pre-commit-hooks.yaml'] def test_args_hook_only(cap_out, store, repo_with_passing_hook): diff --git a/tests/commands/try_repo_test.py b/tests/commands/try_repo_test.py index c5f891ea..0b2db7e5 100644 --- a/tests/commands/try_repo_test.py +++ b/tests/commands/try_repo_test.py @@ -43,7 +43,7 @@ def _run_try_repo(tempdir_factory, **kwargs): def test_try_repo_repo_only(cap_out, tempdir_factory): - with mock.patch.object(time, 'monotonic', return_value=0.0): + with mock.patch.object(time, 'time', return_value=0.0): _run_try_repo(tempdir_factory, verbose=True) start, config, rest = _get_out(cap_out) assert start == '' diff --git a/tests/conftest.py b/tests/conftest.py index 8c9cd14d..30761715 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,6 +2,7 @@ from __future__ import annotations import functools import io +import logging import os.path from unittest import mock @@ -202,25 +203,42 @@ def store(tempdir_factory): yield Store(os.path.join(tempdir_factory.get(), '.pre-commit')) +@pytest.fixture +def log_info_mock(): + with mock.patch.object(logging.getLogger('pre_commit'), 'info') as mck: + yield mck + + +class FakeStream: + def __init__(self): + self.data = io.BytesIO() + + def write(self, s): + self.data.write(s) + + def flush(self): + pass + + class Fixture: - def __init__(self, stream: io.BytesIO) -> None: + def __init__(self, stream): self._stream = stream - def get_bytes(self) -> bytes: + def get_bytes(self): """Get the output as-if no encoding occurred""" - data = self._stream.getvalue() - self._stream.seek(0) - self._stream.truncate() + data = self._stream.data.getvalue() + self._stream.data.seek(0) + self._stream.data.truncate() return data.replace(b'\r\n', b'\n') - def get(self) -> str: + def get(self): """Get the output assuming it was written as UTF-8 bytes""" return self.get_bytes().decode() @pytest.fixture def cap_out(): - stream = io.BytesIO() + stream = FakeStream() write = functools.partial(output.write, stream=stream) write_line_b = functools.partial(output.write_line_b, stream=stream) with mock.patch.multiple(output, write=write, write_line_b=write_line_b): diff --git a/tests/git_test.py b/tests/git_test.py index 02b6ce3a..93f5a1c6 100644 --- a/tests/git_test.py +++ b/tests/git_test.py @@ -141,15 +141,6 @@ def test_get_conflicted_files_unstaged_files(in_merge_conflict): assert ret == {'conflict_file'} -def test_get_conflicted_files_with_file_named_head(in_merge_conflict): - resolve_conflict() - open('HEAD', 'w').close() - 
cmd_output('git', 'add', 'HEAD') - - ret = set(git.get_conflicted_files()) - assert ret == {'conflict_file', 'HEAD'} - - MERGE_MSG = b"Merge branch 'foo' into bar\n\nConflicts:\n\tconflict_file\n" OTHER_MERGE_MSG = MERGE_MSG + b'\tother_conflict_file\n' diff --git a/tests/lang_base_test.py b/tests/lang_base_test.py index 9fac83da..a532b6a5 100644 --- a/tests/lang_base_test.py +++ b/tests/lang_base_test.py @@ -1,5 +1,6 @@ from __future__ import annotations +import multiprocessing import os.path import sys from unittest import mock @@ -9,7 +10,6 @@ import pytest import pre_commit.constants as C from pre_commit import lang_base from pre_commit import parse_shebang -from pre_commit import xargs from pre_commit.prefix import Prefix from pre_commit.util import CalledProcessError @@ -116,23 +116,30 @@ def test_no_env_noop(tmp_path): assert before == inside == after -@pytest.fixture -def cpu_count_mck(): - with mock.patch.object(xargs, 'cpu_count', return_value=4): - yield +def test_target_concurrency_normal(): + with mock.patch.object(multiprocessing, 'cpu_count', return_value=123): + with mock.patch.dict(os.environ, {}, clear=True): + assert lang_base.target_concurrency() == 123 -@pytest.mark.parametrize( - ('var', 'expected'), - ( - ('PRE_COMMIT_NO_CONCURRENCY', 1), - ('TRAVIS', 2), - (None, 4), - ), -) -def test_target_concurrency(cpu_count_mck, var, expected): - with mock.patch.dict(os.environ, {var: '1'} if var else {}, clear=True): - assert lang_base.target_concurrency() == expected +def test_target_concurrency_testing_env_var(): + with mock.patch.dict( + os.environ, {'PRE_COMMIT_NO_CONCURRENCY': '1'}, clear=True, + ): + assert lang_base.target_concurrency() == 1 + + +def test_target_concurrency_on_travis(): + with mock.patch.dict(os.environ, {'TRAVIS': '1'}, clear=True): + assert lang_base.target_concurrency() == 2 + + +def test_target_concurrency_cpu_count_not_implemented(): + with mock.patch.object( + multiprocessing, 'cpu_count', side_effect=NotImplementedError, + ): + with mock.patch.dict(os.environ, {}, clear=True): + assert lang_base.target_concurrency() == 1 def test_shuffled_is_deterministic(): @@ -164,15 +171,3 @@ def test_basic_run_hook(tmp_path): assert ret == 0 out = out.replace(b'\r\n', b'\n') assert out == b'hi hello file file file\n' - - -def test_hook_cmd(): - assert lang_base.hook_cmd('echo hi', ()) == ('echo', 'hi') - - -def test_hook_cmd_hazmat(): - ret = lang_base.hook_cmd('pre-commit hazmat cd a echo -- b', ()) - assert ret == ( - sys.executable, '-m', 'pre_commit.commands.hazmat', - 'cd', 'a', 'echo', '--', 'b', - ) diff --git a/tests/languages/dart_test.py b/tests/languages/dart_test.py index 213d888e..5bb5aa68 100644 --- a/tests/languages/dart_test.py +++ b/tests/languages/dart_test.py @@ -10,7 +10,7 @@ from testing.language_helpers import run_language def test_dart(tmp_path): pubspec_yaml = '''\ environment: - sdk: '>=2.12.0 <4.0.0' + sdk: '>=2.10.0 <3.0.0' name: hello_world_dart diff --git a/tests/languages/docker_image_test.py b/tests/languages/docker_image_test.py index 4f720600..7993c11a 100644 --- a/tests/languages/docker_image_test.py +++ b/tests/languages/docker_image_test.py @@ -1,18 +1,10 @@ from __future__ import annotations -import pytest - from pre_commit.languages import docker_image -from pre_commit.util import cmd_output_b from testing.language_helpers import run_language from testing.util import xfailif_windows -@pytest.fixture(autouse=True, scope='module') -def _ensure_image_available(): - cmd_output_b('docker', 'run', '--rm', 'ubuntu:22.04', 
'echo') - - @xfailif_windows # pragma: win32 no cover def test_docker_image_hook_via_entrypoint(tmp_path): ret = run_language( @@ -33,27 +25,3 @@ def test_docker_image_hook_via_args(tmp_path): args=('hello hello world',), ) assert ret == (0, b'hello hello world\n') - - -@xfailif_windows # pragma: win32 no cover -def test_docker_image_color_tty(tmp_path): - ret = run_language( - tmp_path, - docker_image, - 'ubuntu:22.04', - args=('grep', '--color', 'root', '/etc/group'), - color=True, - ) - assert ret == (0, b'\x1b[01;31m\x1b[Kroot\x1b[m\x1b[K:x:0:\n') - - -@xfailif_windows # pragma: win32 no cover -def test_docker_image_no_color_no_tty(tmp_path): - ret = run_language( - tmp_path, - docker_image, - 'ubuntu:22.04', - args=('grep', '--color', 'root', '/etc/group'), - color=False, - ) - assert ret == (0, b'root:x:0:\n') diff --git a/tests/languages/docker_test.py b/tests/languages/docker_test.py index e269976f..836382a8 100644 --- a/tests/languages/docker_test.py +++ b/tests/languages/docker_test.py @@ -14,173 +14,40 @@ from pre_commit.util import CalledProcessError from testing.language_helpers import run_language from testing.util import xfailif_windows -DOCKER_CGROUPS_V1_MOUNTINFO_EXAMPLE = b'''\ -759 717 0:52 / / rw,relatime master:300 - overlay overlay rw,lowerdir=/var/lib/docker/overlay2/l/PCPE5P5IVGM7CFCPJR353N3ONK:/var/lib/docker/overlay2/l/EQFSDHFAJ333VEMEJD4ZTRIZCB,upperdir=/var/lib/docker/overlay2/0d9f6bf186030d796505b87d6daa92297355e47641e283d3c09d83a7f221e462/diff,workdir=/var/lib/docker/overlay2/0d9f6bf186030d796505b87d6daa92297355e47641e283d3c09d83a7f221e462/work -760 759 0:58 / /proc rw,nosuid,nodev,noexec,relatime - proc proc rw -761 759 0:59 / /dev rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,inode64 -762 761 0:60 / /dev/pts rw,nosuid,noexec,relatime - devpts devpts rw,gid=5,mode=620,ptmxmode=666 -763 759 0:61 / /sys ro,nosuid,nodev,noexec,relatime - sysfs sysfs ro -764 763 0:62 / /sys/fs/cgroup rw,nosuid,nodev,noexec,relatime - tmpfs tmpfs rw,mode=755,inode64 -765 764 0:29 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/systemd ro,nosuid,nodev,noexec,relatime master:11 - cgroup cgroup rw,xattr,name=systemd -766 764 0:32 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/rdma ro,nosuid,nodev,noexec,relatime master:15 - cgroup cgroup rw,rdma -767 764 0:33 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/cpu,cpuacct ro,nosuid,nodev,noexec,relatime master:16 - cgroup cgroup rw,cpu,cpuacct -768 764 0:34 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/cpuset ro,nosuid,nodev,noexec,relatime master:17 - cgroup cgroup rw,cpuset -769 764 0:35 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/pids ro,nosuid,nodev,noexec,relatime master:18 - cgroup cgroup rw,pids -770 764 0:36 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/memory ro,nosuid,nodev,noexec,relatime master:19 - cgroup cgroup rw,memory -771 764 0:37 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/perf_event ro,nosuid,nodev,noexec,relatime master:20 - cgroup cgroup rw,perf_event -772 764 0:38 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/net_cls,net_prio ro,nosuid,nodev,noexec,relatime master:21 - cgroup cgroup rw,net_cls,net_prio -773 764 0:39 
/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/blkio ro,nosuid,nodev,noexec,relatime master:22 - cgroup cgroup rw,blkio -774 764 0:40 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/misc ro,nosuid,nodev,noexec,relatime master:23 - cgroup cgroup rw,misc -775 764 0:41 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/hugetlb ro,nosuid,nodev,noexec,relatime master:24 - cgroup cgroup rw,hugetlb -776 764 0:42 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/devices ro,nosuid,nodev,noexec,relatime master:25 - cgroup cgroup rw,devices -777 764 0:43 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/freezer ro,nosuid,nodev,noexec,relatime master:26 - cgroup cgroup rw,freezer -778 761 0:57 / /dev/mqueue rw,nosuid,nodev,noexec,relatime - mqueue mqueue rw -779 761 0:63 / /dev/shm rw,nosuid,nodev,noexec,relatime - tmpfs shm rw,size=65536k,inode64 -780 759 8:5 /var/lib/docker/containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/resolv.conf /etc/resolv.conf rw,relatime - ext4 /dev/sda5 rw,errors=remount-ro -781 759 8:5 /var/lib/docker/containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/hostname /etc/hostname rw,relatime - ext4 /dev/sda5 rw,errors=remount-ro -782 759 8:5 /var/lib/docker/containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/hosts /etc/hosts rw,relatime - ext4 /dev/sda5 rw,errors=remount-ro -718 761 0:60 /0 /dev/console rw,nosuid,noexec,relatime - devpts devpts rw,gid=5,mode=620,ptmxmode=666 -719 760 0:58 /bus /proc/bus ro,nosuid,nodev,noexec,relatime - proc proc rw -720 760 0:58 /fs /proc/fs ro,nosuid,nodev,noexec,relatime - proc proc rw -721 760 0:58 /irq /proc/irq ro,nosuid,nodev,noexec,relatime - proc proc rw -722 760 0:58 /sys /proc/sys ro,nosuid,nodev,noexec,relatime - proc proc rw -723 760 0:58 /sysrq-trigger /proc/sysrq-trigger ro,nosuid,nodev,noexec,relatime - proc proc rw -724 760 0:64 / /proc/asound ro,relatime - tmpfs tmpfs ro,inode64 -725 760 0:65 / /proc/acpi ro,relatime - tmpfs tmpfs ro,inode64 -726 760 0:59 /null /proc/kcore rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,inode64 -727 760 0:59 /null /proc/keys rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,inode64 -728 760 0:59 /null /proc/timer_list rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,inode64 -729 760 0:66 / /proc/scsi ro,relatime - tmpfs tmpfs ro,inode64 -730 763 0:67 / /sys/firmware ro,relatime - tmpfs tmpfs ro,inode64 -731 763 0:68 / /sys/devices/virtual/powercap ro,relatime - tmpfs tmpfs ro,inode64 -''' # noqa: E501 - -DOCKER_CGROUPS_V2_MOUNTINFO_EXAMPLE = b'''\ -721 386 0:45 / / rw,relatime master:218 - overlay overlay rw,lowerdir=/var/lib/docker/overlay2/l/QHZ7OM7P4AQD3XLG274ZPWAJCV:/var/lib/docker/overlay2/l/5RFG6SZWVGOG2NKEYXJDQCQYX5,upperdir=/var/lib/docker/overlay2/e4ad859fc5d4791932b9b976052f01fb0063e01de3cef916e40ae2121f6a166e/diff,workdir=/var/lib/docker/overlay2/e4ad859fc5d4791932b9b976052f01fb0063e01de3cef916e40ae2121f6a166e/work,nouserxattr -722 721 0:48 / /proc rw,nosuid,nodev,noexec,relatime - proc proc rw -723 721 0:50 / /dev rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,inode64 -724 723 0:51 / /dev/pts rw,nosuid,noexec,relatime - devpts devpts rw,gid=5,mode=620,ptmxmode=666 -725 721 0:52 / /sys ro,nosuid,nodev,noexec,relatime - sysfs sysfs ro -726 725 0:26 / /sys/fs/cgroup ro,nosuid,nodev,noexec,relatime - cgroup2 cgroup 
rw,nsdelegate,memory_recursiveprot -727 723 0:47 / /dev/mqueue rw,nosuid,nodev,noexec,relatime - mqueue mqueue rw -728 723 0:53 / /dev/shm rw,nosuid,nodev,noexec,relatime - tmpfs shm rw,size=65536k,inode64 -729 721 8:3 /var/lib/docker/containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/resolv.conf /etc/resolv.conf rw,relatime - ext4 /dev/sda3 rw,errors=remount-ro -730 721 8:3 /var/lib/docker/containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/hostname /etc/hostname rw,relatime - ext4 /dev/sda3 rw,errors=remount-ro -731 721 8:3 /var/lib/docker/containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/hosts /etc/hosts rw,relatime - ext4 /dev/sda3 rw,errors=remount-ro -387 723 0:51 /0 /dev/console rw,nosuid,noexec,relatime - devpts devpts rw,gid=5,mode=620,ptmxmode=666 -388 722 0:48 /bus /proc/bus ro,nosuid,nodev,noexec,relatime - proc proc rw -389 722 0:48 /fs /proc/fs ro,nosuid,nodev,noexec,relatime - proc proc rw -525 722 0:48 /irq /proc/irq ro,nosuid,nodev,noexec,relatime - proc proc rw -526 722 0:48 /sys /proc/sys ro,nosuid,nodev,noexec,relatime - proc proc rw -571 722 0:48 /sysrq-trigger /proc/sysrq-trigger ro,nosuid,nodev,noexec,relatime - proc proc rw -572 722 0:57 / /proc/asound ro,relatime - tmpfs tmpfs ro,inode64 -575 722 0:58 / /proc/acpi ro,relatime - tmpfs tmpfs ro,inode64 -576 722 0:50 /null /proc/kcore rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,inode64 -577 722 0:50 /null /proc/keys rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,inode64 -578 722 0:50 /null /proc/timer_list rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,inode64 -579 722 0:59 / /proc/scsi ro,relatime - tmpfs tmpfs ro,inode64 -580 725 0:60 / /sys/firmware ro,relatime - tmpfs tmpfs ro,inode64 -''' # noqa: E501 - -PODMAN_CGROUPS_V1_MOUNTINFO_EXAMPLE = b'''\ -1200 915 0:57 / / rw,relatime - overlay overlay rw,lowerdir=/home/asottile/.local/share/containers/storage/overlay/l/ZWAU3VY3ZHABQJRBUAFPBX7R5D,upperdir=/home/asottile/.local/share/containers/storage/overlay/72504ef163fda63838930450553b7306412ccad139a007626732b3dc43af5200/diff,workdir=/home/asottile/.local/share/containers/storage/overlay/72504ef163fda63838930450553b7306412ccad139a007626732b3dc43af5200/work,volatile,userxattr -1204 1200 0:62 / /proc rw,nosuid,nodev,noexec,relatime - proc proc rw -1205 1200 0:63 / /dev rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,uid=1000,gid=1000,inode64 -1206 1200 0:64 / /sys ro,nosuid,nodev,noexec,relatime - sysfs sysfs rw -1207 1205 0:65 / /dev/pts rw,nosuid,noexec,relatime - devpts devpts rw,gid=100004,mode=620,ptmxmode=666 -1208 1205 0:61 / /dev/mqueue rw,nosuid,nodev,noexec,relatime - mqueue mqueue rw -1209 1200 0:53 /containers/overlay-containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/userdata/.containerenv /run/.containerenv rw,nosuid,nodev,relatime - tmpfs tmpfs rw,size=814036k,mode=700,uid=1000,gid=1000,inode64 -1210 1200 0:53 /containers/overlay-containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/userdata/resolv.conf /etc/resolv.conf rw,nosuid,nodev,relatime - tmpfs tmpfs rw,size=814036k,mode=700,uid=1000,gid=1000,inode64 -1211 1200 0:53 /containers/overlay-containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/userdata/hosts /etc/hosts rw,nosuid,nodev,relatime - tmpfs tmpfs rw,size=814036k,mode=700,uid=1000,gid=1000,inode64 -1212 1205 0:56 / /dev/shm rw,relatime - tmpfs shm rw,size=64000k,uid=1000,gid=1000,inode64 -1213 1200 0:53 
/containers/overlay-containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/userdata/hostname /etc/hostname rw,nosuid,nodev,relatime - tmpfs tmpfs rw,size=814036k,mode=700,uid=1000,gid=1000,inode64 -1214 1206 0:66 / /sys/fs/cgroup rw,nosuid,nodev,noexec,relatime - tmpfs cgroup rw,size=1024k,uid=1000,gid=1000,inode64 -1215 1214 0:43 / /sys/fs/cgroup/freezer ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,freezer -1216 1214 0:42 /user.slice /sys/fs/cgroup/devices ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,devices -1217 1214 0:41 / /sys/fs/cgroup/hugetlb ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,hugetlb -1218 1214 0:40 / /sys/fs/cgroup/misc ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,misc -1219 1214 0:39 / /sys/fs/cgroup/blkio ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,blkio -1220 1214 0:38 / /sys/fs/cgroup/net_cls,net_prio ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,net_cls,net_prio -1221 1214 0:37 / /sys/fs/cgroup/perf_event ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,perf_event -1222 1214 0:36 /user.slice/user-1000.slice/user@1000.service /sys/fs/cgroup/memory ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,memory -1223 1214 0:35 /user.slice/user-1000.slice/user@1000.service /sys/fs/cgroup/pids ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,pids -1224 1214 0:34 / /sys/fs/cgroup/cpuset ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,cpuset -1225 1214 0:33 / /sys/fs/cgroup/cpu,cpuacct ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,cpu,cpuacct -1226 1214 0:32 / /sys/fs/cgroup/rdma ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,rdma -1227 1214 0:29 /user.slice/user-1000.slice/user@1000.service/apps.slice/apps-org.gnome.Terminal.slice/vte-spawn-0c50448e-b395-4d76-8b92-379f16e5066f.scope /sys/fs/cgroup/systemd ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,xattr,name=systemd -1228 1205 0:5 /null /dev/null rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64 -1229 1205 0:5 /zero /dev/zero rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64 -1230 1205 0:5 /full /dev/full rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64 -1231 1205 0:5 /tty /dev/tty rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64 -1232 1205 0:5 /random /dev/random rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64 -1233 1205 0:5 /urandom /dev/urandom rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64 -1234 1204 0:67 / /proc/acpi ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64 -1235 1204 0:5 /null /proc/kcore rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64 -1236 1204 0:5 /null /proc/keys rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64 -1237 1204 0:5 /null /proc/timer_list rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64 -1238 1204 0:68 / /proc/scsi ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64 -1239 1206 0:69 / /sys/firmware ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64 -1240 1206 0:70 / /sys/dev/block ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64 -1241 1204 0:62 /asound /proc/asound ro,relatime - proc proc rw -1242 1204 0:62 /bus /proc/bus ro,relatime - proc proc rw -1243 1204 0:62 /fs 
/proc/fs ro,relatime - proc proc rw -1244 1204 0:62 /irq /proc/irq ro,relatime - proc proc rw -1245 1204 0:62 /sys /proc/sys ro,relatime - proc proc rw -1256 1204 0:62 /sysrq-trigger /proc/sysrq-trigger ro,relatime - proc proc rw -916 1205 0:65 /0 /dev/console rw,relatime - devpts devpts rw,gid=100004,mode=620,ptmxmode=666 -''' # noqa: E501 - -PODMAN_CGROUPS_V2_MOUNTINFO_EXAMPLE = b'''\ -685 690 0:63 /containers/overlay-containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/userdata/resolv.conf /etc/resolv.conf rw,nosuid,nodev,relatime - tmpfs tmpfs rw,size=1637624k,nr_inodes=409406,mode=700,uid=1000,gid=1000,inode64 -686 690 0:63 /containers/overlay-containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/userdata/hosts /etc/hosts rw,nosuid,nodev,relatime - tmpfs tmpfs rw,size=1637624k,nr_inodes=409406,mode=700,uid=1000,gid=1000,inode64 -687 692 0:50 / /dev/shm rw,nosuid,nodev,noexec,relatime - tmpfs shm rw,size=64000k,uid=1000,gid=1000,inode64 -688 690 0:63 /containers/overlay-containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/userdata/.containerenv /run/.containerenv rw,nosuid,nodev,relatime - tmpfs tmpfs rw,size=1637624k,nr_inodes=409406,mode=700,uid=1000,gid=1000,inode64 -689 690 0:63 /containers/overlay-containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/userdata/hostname /etc/hostname rw,nosuid,nodev,relatime - tmpfs tmpfs rw,size=1637624k,nr_inodes=409406,mode=700,uid=1000,gid=1000,inode64 -690 546 0:55 / / rw,relatime - overlay overlay rw,lowerdir=/home/asottile/.local/share/containers/storage/overlay/l/NPOHYOD3PI3YW6TQSGBOVOUSK6,upperdir=/home/asottile/.local/share/containers/storage/overlay/565c206fb79f876ffd5f069b8bd7a97fb5e47d5d07396b0c395a4ed6725d4a8e/diff,workdir=/home/asottile/.local/share/containers/storage/overlay/565c206fb79f876ffd5f069b8bd7a97fb5e47d5d07396b0c395a4ed6725d4a8e/work,redirect_dir=nofollow,uuid=on,volatile,userxattr -691 690 0:59 / /proc rw,nosuid,nodev,noexec,relatime - proc proc rw -692 690 0:61 / /dev rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,uid=1000,gid=1000,inode64 -693 690 0:62 / /sys ro,nosuid,nodev,noexec,relatime - sysfs sysfs rw -694 692 0:66 / /dev/pts rw,nosuid,noexec,relatime - devpts devpts rw,gid=100004,mode=620,ptmxmode=666 -695 692 0:58 / /dev/mqueue rw,nosuid,nodev,noexec,relatime - mqueue mqueue rw -696 693 0:28 / /sys/fs/cgroup ro,nosuid,nodev,noexec,relatime - cgroup2 cgroup2 rw,nsdelegate,memory_recursiveprot -698 692 0:6 /null /dev/null rw,nosuid,noexec,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64 -699 692 0:6 /zero /dev/zero rw,nosuid,noexec,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64 -700 692 0:6 /full /dev/full rw,nosuid,noexec,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64 -701 692 0:6 /tty /dev/tty rw,nosuid,noexec,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64 -702 692 0:6 /random /dev/random rw,nosuid,noexec,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64 -703 692 0:6 /urandom /dev/urandom rw,nosuid,noexec,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64 -704 691 0:67 / /proc/acpi ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64 -705 691 0:6 /null /proc/kcore ro,nosuid,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64 -706 691 0:6 /null /proc/keys ro,nosuid,relatime - devtmpfs udev 
rw,size=8147812k,nr_inodes=2036953,mode=755,inode64 -707 691 0:6 /null /proc/latency_stats ro,nosuid,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64 -708 691 0:6 /null /proc/timer_list ro,nosuid,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64 -709 691 0:68 / /proc/scsi ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64 -710 693 0:69 / /sys/firmware ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64 -711 693 0:70 / /sys/dev/block ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64 -712 693 0:71 / /sys/devices/virtual/powercap ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64 -713 691 0:59 /asound /proc/asound ro,nosuid,nodev,noexec,relatime - proc proc rw -714 691 0:59 /bus /proc/bus ro,nosuid,nodev,noexec,relatime - proc proc rw -715 691 0:59 /fs /proc/fs ro,nosuid,nodev,noexec,relatime - proc proc rw -716 691 0:59 /irq /proc/irq ro,nosuid,nodev,noexec,relatime - proc proc rw -717 691 0:59 /sys /proc/sys ro,nosuid,nodev,noexec,relatime - proc proc rw -718 691 0:59 /sysrq-trigger /proc/sysrq-trigger ro,nosuid,nodev,noexec,relatime - proc proc rw -547 692 0:66 /0 /dev/console rw,relatime - devpts devpts rw,gid=100004,mode=620,ptmxmode=666 +DOCKER_CGROUP_EXAMPLE = b'''\ +12:hugetlb:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 +11:blkio:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 +10:freezer:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 +9:cpu,cpuacct:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 +8:pids:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 +7:rdma:/ +6:net_cls,net_prio:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 +5:cpuset:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 +4:devices:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 +3:memory:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 +2:perf_event:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 +1:name=systemd:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 +0::/system.slice/containerd.service ''' # noqa: E501 # The ID should match the above cgroup example. 
CONTAINER_ID = 'c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7' # noqa: E501 -NON_DOCKER_MOUNTINFO_EXAMPLE = b'''\ -21 27 0:19 / /sys rw,nosuid,nodev,noexec,relatime shared:7 - sysfs sysfs rw -22 27 0:20 / /proc rw,nosuid,nodev,noexec,relatime shared:14 - proc proc rw -23 27 0:5 / /dev rw,nosuid,relatime shared:2 - devtmpfs udev rw,size=10219484k,nr_inodes=2554871,mode=755,inode64 -24 23 0:21 / /dev/pts rw,nosuid,noexec,relatime shared:3 - devpts devpts rw,gid=5,mode=620,ptmxmode=000 -25 27 0:22 / /run rw,nosuid,nodev,noexec,relatime shared:5 - tmpfs tmpfs rw,size=2047768k,mode=755,inode64 -27 1 8:2 / / rw,relatime shared:1 - ext4 /dev/sda2 rw,errors=remount-ro -28 21 0:6 / /sys/kernel/security rw,nosuid,nodev,noexec,relatime shared:8 - securityfs securityfs rw -29 23 0:24 / /dev/shm rw,nosuid,nodev shared:4 - tmpfs tmpfs rw,inode64 -30 25 0:25 / /run/lock rw,nosuid,nodev,noexec,relatime shared:6 - tmpfs tmpfs rw,size=5120k,inode64 -''' # noqa: E501 +NON_DOCKER_CGROUP_EXAMPLE = b'''\ +12:perf_event:/ +11:hugetlb:/ +10:devices:/ +9:blkio:/ +8:rdma:/ +7:cpuset:/ +6:cpu,cpuacct:/ +5:freezer:/ +4:memory:/ +3:pids:/ +2:net_cls,net_prio:/ +1:name=systemd:/init.scope +0::/init.scope +''' def test_docker_fallback_user(): @@ -195,46 +62,9 @@ def test_docker_fallback_user(): assert docker.get_docker_user() == () -@pytest.fixture(autouse=True) -def _avoid_cache(): - with mock.patch.object( - docker, - '_is_rootless', - docker._is_rootless.__wrapped__, - ): - yield - - -@pytest.mark.parametrize( - 'info_ret', - ( - (0, b'{"SecurityOptions": ["name=rootless","name=cgroupns"]}', b''), - (0, b'{"host": {"security": {"rootless": true}}}', b''), - ), -) -def test_docker_user_rootless(info_ret): - with mock.patch.object(docker, 'cmd_output_b', return_value=info_ret): - assert docker.get_docker_user() == () - - -@pytest.mark.parametrize( - 'info_ret', - ( - (0, b'{"SecurityOptions": ["name=cgroupns"]}', b''), - (0, b'{"host": {"security": {"rootless": false}}}', b''), - (0, b'{"response_from_some_other_container_engine": true}', b''), - (0, b'{"SecurityOptions": null}', b''), - (1, b'', b''), - ), -) -def test_docker_user_non_rootless(info_ret): - with mock.patch.object(docker, 'cmd_output_b', return_value=info_ret): - assert docker.get_docker_user() != () - - -def test_container_id_no_file(): +def test_in_docker_no_file(): with mock.patch.object(builtins, 'open', side_effect=FileNotFoundError): - assert docker._get_container_id() is None + assert docker._is_in_docker() is False def _mock_open(data): @@ -246,33 +76,38 @@ def _mock_open(data): ) -def test_container_id_not_in_file(): - with _mock_open(NON_DOCKER_MOUNTINFO_EXAMPLE): - assert docker._get_container_id() is None +def test_in_docker_docker_in_file(): + with _mock_open(DOCKER_CGROUP_EXAMPLE): + assert docker._is_in_docker() is True + + +def test_in_docker_docker_not_in_file(): + with _mock_open(NON_DOCKER_CGROUP_EXAMPLE): + assert docker._is_in_docker() is False def test_get_container_id(): - with _mock_open(DOCKER_CGROUPS_V1_MOUNTINFO_EXAMPLE): - assert docker._get_container_id() == CONTAINER_ID - with _mock_open(DOCKER_CGROUPS_V2_MOUNTINFO_EXAMPLE): - assert docker._get_container_id() == CONTAINER_ID - with _mock_open(PODMAN_CGROUPS_V1_MOUNTINFO_EXAMPLE): - assert docker._get_container_id() == CONTAINER_ID - with _mock_open(PODMAN_CGROUPS_V2_MOUNTINFO_EXAMPLE): + with _mock_open(DOCKER_CGROUP_EXAMPLE): assert docker._get_container_id() == CONTAINER_ID +def test_get_container_id_failure(): + with _mock_open(b''), 
pytest.raises(RuntimeError): + docker._get_container_id() + + def test_get_docker_path_not_in_docker_returns_same(): - with _mock_open(b''): + with mock.patch.object(docker, '_is_in_docker', return_value=False): assert docker._get_docker_path('abc') == 'abc' @pytest.fixture def in_docker(): - with mock.patch.object( - docker, '_get_container_id', return_value=CONTAINER_ID, - ): - yield + with mock.patch.object(docker, '_is_in_docker', return_value=True): + with mock.patch.object( + docker, '_get_container_id', return_value=CONTAINER_ID, + ): + yield def _linux_commonpath(): @@ -360,14 +195,3 @@ CMD ["echo", "This is overwritten by the entry"'] ret = run_language(tmp_path, docker, 'echo hello hello world') assert ret == (0, b'hello hello world\n') - - -@xfailif_windows # pragma: win32 no cover -def test_docker_hook_mount_permissions(tmp_path): - dockerfile = '''\ -FROM ubuntu:22.04 -''' - tmp_path.joinpath('Dockerfile').write_text(dockerfile) - - retcode, _ = run_language(tmp_path, docker, 'touch', ('README.md',)) - assert retcode == 0 diff --git a/tests/languages/dotnet_test.py b/tests/languages/dotnet_test.py index ee408256..470c03b2 100644 --- a/tests/languages/dotnet_test.py +++ b/tests/languages/dotnet_test.py @@ -27,7 +27,7 @@ def _csproj(tool_name): Exe - net8 + net6 true {tool_name} ./nupkg diff --git a/tests/languages/golang_test.py b/tests/languages/golang_test.py index 7fb6ab18..ec5a8787 100644 --- a/tests/languages/golang_test.py +++ b/tests/languages/golang_test.py @@ -7,18 +7,10 @@ import re_assert import pre_commit.constants as C from pre_commit import lang_base -from pre_commit.commands.install_uninstall import install from pre_commit.envcontext import envcontext from pre_commit.languages import golang from pre_commit.store import _make_local_repo -from pre_commit.util import CalledProcessError -from pre_commit.util import cmd_output -from testing.fixtures import add_config_to_repo -from testing.fixtures import make_config_from_repo from testing.language_helpers import run_language -from testing.util import cmd_output_mocked_pre_commit_home -from testing.util import cwd -from testing.util import git_commit ACTUAL_GET_DEFAULT_VERSION = golang.get_default_version.__wrapped__ @@ -119,11 +111,11 @@ def test_golang_versioned(tmp_path): tmp_path, golang, 'go version', - version='1.21.1', + version='1.18.4', ) assert ret == 0 - assert out.startswith(b'go version go1.21.1') + assert out.startswith(b'go version go1.18.4') def test_local_golang_additional_deps(tmp_path): @@ -136,101 +128,9 @@ def test_local_golang_additional_deps(tmp_path): deps=('golang.org/x/example/hello@latest',), ) - assert ret == (0, b'Hello, world!\n') + assert ret == (0, b'Hello, Go examples!\n') def test_golang_hook_still_works_when_gobin_is_set(tmp_path): with envcontext((('GOBIN', str(tmp_path.joinpath('gobin'))),)): test_golang_system(tmp_path) - - -def test_during_commit_all(tmp_path, tempdir_factory, store, in_git_dir): - hook_dir = tmp_path.joinpath('hook') - hook_dir.mkdir() - _make_hello_world(hook_dir) - hook_dir.joinpath('.pre-commit-hooks.yaml').write_text( - '- id: hello-world\n' - ' name: hello world\n' - ' entry: golang-hello-world\n' - ' language: golang\n' - ' always_run: true\n', - ) - cmd_output('git', 'init', hook_dir) - cmd_output('git', 'add', '.', cwd=hook_dir) - git_commit(cwd=hook_dir) - - add_config_to_repo(in_git_dir, make_config_from_repo(hook_dir)) - - assert not install(C.CONFIG_FILE, store, hook_types=['pre-commit']) - - git_commit( - fn=cmd_output_mocked_pre_commit_home, - 
tempdir_factory=tempdir_factory, - ) - - -def test_automatic_toolchain_switching(tmp_path): - go_mod = '''\ -module toolchain-version-test - -go 1.23.1 -''' - main_go = '''\ -package main - -func main() {} -''' - tmp_path.joinpath('go.mod').write_text(go_mod) - mod_dir = tmp_path.joinpath('toolchain-version-test') - mod_dir.mkdir() - main_file = mod_dir.joinpath('main.go') - main_file.write_text(main_go) - - with pytest.raises(CalledProcessError) as excinfo: - run_language( - path=tmp_path, - language=golang, - version='1.22.0', - exe='golang-version-test', - ) - - assert 'go.mod requires go >= 1.23.1' in excinfo.value.stderr.decode() - - -def test_automatic_toolchain_switching_go_fmt(tmp_path, monkeypatch): - go_mod_hook = '''\ -module toolchain-version-test - -go 1.22.0 -''' - go_mod = '''\ -module toolchain-version-test - -go 1.23.1 -''' - main_go = '''\ -package main - -func main() {} -''' - hook_dir = tmp_path.joinpath('hook') - hook_dir.mkdir() - hook_dir.joinpath('go.mod').write_text(go_mod_hook) - - test_dir = tmp_path.joinpath('test') - test_dir.mkdir() - test_dir.joinpath('go.mod').write_text(go_mod) - main_file = test_dir.joinpath('main.go') - main_file.write_text(main_go) - - with cwd(test_dir): - ret, out = run_language( - path=hook_dir, - language=golang, - version='1.22.0', - exe='go fmt', - file_args=(str(main_file),), - ) - - assert ret == 1 - assert 'go.mod requires go >= 1.23.1' in out.decode() diff --git a/tests/languages/haskell_test.py b/tests/languages/haskell_test.py deleted file mode 100644 index f888109b..00000000 --- a/tests/languages/haskell_test.py +++ /dev/null @@ -1,50 +0,0 @@ -from __future__ import annotations - -import pytest - -from pre_commit.errors import FatalError -from pre_commit.languages import haskell -from pre_commit.util import win_exe -from testing.language_helpers import run_language - - -def test_run_example_executable(tmp_path): - example_cabal = '''\ -cabal-version: 2.4 -name: example -version: 0.1.0.0 - -executable example - main-is: Main.hs - - build-depends: base >=4 - default-language: Haskell2010 -''' - main_hs = '''\ -module Main where - -main :: IO () -main = putStrLn "Hello, Haskell!" 
-''' - tmp_path.joinpath('example.cabal').write_text(example_cabal) - tmp_path.joinpath('Main.hs').write_text(main_hs) - - result = run_language(tmp_path, haskell, 'example') - assert result == (0, b'Hello, Haskell!\n') - - # should not symlink things into environments - exe = tmp_path.joinpath(win_exe('hs_env-default/bin/example')) - assert exe.is_file() - assert not exe.is_symlink() - - -def test_run_dep(tmp_path): - result = run_language(tmp_path, haskell, 'hello', deps=['hello']) - assert result == (0, b'Hello, World!\n') - - -def test_run_empty(tmp_path): - with pytest.raises(FatalError) as excinfo: - run_language(tmp_path, haskell, 'example') - msg, = excinfo.value.args - assert msg == 'Expected .cabal files or additional_dependencies' diff --git a/tests/languages/julia_test.py b/tests/languages/julia_test.py deleted file mode 100644 index 175622d6..00000000 --- a/tests/languages/julia_test.py +++ /dev/null @@ -1,111 +0,0 @@ -from __future__ import annotations - -import os -from unittest import mock - -from pre_commit.languages import julia -from testing.language_helpers import run_language -from testing.util import cwd - - -def _make_hook(tmp_path, julia_code): - src_dir = tmp_path.joinpath('src') - src_dir.mkdir() - src_dir.joinpath('main.jl').write_text(julia_code) - tmp_path.joinpath('Project.toml').write_text( - '[deps]\n' - 'Example = "7876af07-990d-54b4-ab0e-23690620f79a"\n', - ) - - -def test_julia_hook(tmp_path): - code = """ - using Example - function main() - println("Hello, world!") - end - main() - """ - _make_hook(tmp_path, code) - expected = (0, b'Hello, world!\n') - assert run_language(tmp_path, julia, 'src/main.jl') == expected - - -def test_julia_hook_with_startup(tmp_path): - depot_path = tmp_path.joinpath('depot') - depot_path.joinpath('config').mkdir(parents=True) - startup = depot_path.joinpath('config', 'startup.jl') - startup.write_text('error("Startup file used!")\n') - - depo_path_var = f'{depot_path}{os.pathsep}' - with mock.patch.dict(os.environ, {'JULIA_DEPOT_PATH': depo_path_var}): - test_julia_hook(tmp_path) - - -def test_julia_hook_manifest(tmp_path): - code = """ - using Example - println(pkgversion(Example)) - """ - _make_hook(tmp_path, code) - - tmp_path.joinpath('Manifest.toml').write_text( - 'manifest_format = "2.0"\n\n' - '[[deps.Example]]\n' - 'git-tree-sha1 = "11820aa9c229fd3833d4bd69e5e75ef4e7273bf1"\n' - 'uuid = "7876af07-990d-54b4-ab0e-23690620f79a"\n' - 'version = "0.5.4"\n', - ) - expected = (0, b'0.5.4\n') - assert run_language(tmp_path, julia, 'src/main.jl') == expected - - -def test_julia_hook_args(tmp_path): - code = """ - function main(argv) - foreach(println, argv) - end - main(ARGS) - """ - _make_hook(tmp_path, code) - expected = (0, b'--arg1\n--arg2\n') - assert run_language( - tmp_path, julia, 'src/main.jl --arg1 --arg2', - ) == expected - - -def test_julia_hook_additional_deps(tmp_path): - code = """ - using TOML - function main() - project_file = Base.active_project() - dict = TOML.parsefile(project_file) - for (k, v) in dict["deps"] - println(k, " = ", v) - end - end - main() - """ - _make_hook(tmp_path, code) - deps = ('TOML=fa267f1f-6049-4f14-aa54-33bafae1ed76',) - ret, out = run_language(tmp_path, julia, 'src/main.jl', deps=deps) - assert ret == 0 - assert b'Example = 7876af07-990d-54b4-ab0e-23690620f79a' in out - assert b'TOML = fa267f1f-6049-4f14-aa54-33bafae1ed76' in out - - -def test_julia_repo_local(tmp_path): - env_dir = tmp_path.joinpath('envdir') - env_dir.mkdir() - local_dir = tmp_path.joinpath('local') - 
local_dir.mkdir() - local_dir.joinpath('local.jl').write_text( - 'using TOML; foreach(println, ARGS)', - ) - with cwd(local_dir): - deps = ('TOML=fa267f1f-6049-4f14-aa54-33bafae1ed76',) - expected = (0, b'--local-arg1\n--local-arg2\n') - assert run_language( - env_dir, julia, 'local.jl --local-arg1 --local-arg2', - deps=deps, is_local=True, - ) == expected diff --git a/tests/languages/node_test.py b/tests/languages/node_test.py index 055cb1e9..cba0228b 100644 --- a/tests/languages/node_test.py +++ b/tests/languages/node_test.py @@ -139,7 +139,7 @@ def test_node_with_user_config_set(tmp_path): test_node_hook_system(tmp_path) -@pytest.mark.parametrize('version', (C.DEFAULT, '18.14.0')) +@pytest.mark.parametrize('version', (C.DEFAULT, '18.13.0')) def test_node_hook_versions(tmp_path, version): _make_hello_world(tmp_path) ret = run_language(tmp_path, node, 'node-hello', version=version) diff --git a/tests/languages/python_test.py b/tests/languages/python_test.py index 593634b7..ab26e14e 100644 --- a/tests/languages/python_test.py +++ b/tests/languages/python_test.py @@ -10,11 +10,8 @@ import pre_commit.constants as C from pre_commit.envcontext import envcontext from pre_commit.languages import python from pre_commit.prefix import Prefix -from pre_commit.store import _make_local_repo -from pre_commit.util import cmd_output_b from pre_commit.util import make_executable from pre_commit.util import win_exe -from testing.auto_namedtuple import auto_namedtuple from testing.language_helpers import run_language @@ -37,72 +34,6 @@ def test_read_pyvenv_cfg_non_utf8(tmpdir): assert python._read_pyvenv_cfg(pyvenv_cfg) == expected -def _get_default_version( - *, - impl: str, - exe: str, - found: set[str], - version: tuple[int, int], -) -> str: - sys_exe = f'/fake/path/{exe}' - sys_impl = auto_namedtuple(name=impl) - sys_ver = auto_namedtuple(major=version[0], minor=version[1]) - - def find_exe(s): - if s in found: - return f'/fake/path/found/{exe}' - else: - return None - - with ( - mock.patch.object(sys, 'implementation', sys_impl), - mock.patch.object(sys, 'executable', sys_exe), - mock.patch.object(sys, 'version_info', sys_ver), - mock.patch.object(python, 'find_executable', find_exe), - ): - return python.get_default_version.__wrapped__() - - -def test_default_version_sys_executable_found(): - ret = _get_default_version( - impl='cpython', - exe='python3.12', - found={'python3.12'}, - version=(3, 12), - ) - assert ret == 'python3.12' - - -def test_default_version_picks_specific_when_found(): - ret = _get_default_version( - impl='cpython', - exe='python3', - found={'python3', 'python3.12'}, - version=(3, 12), - ) - assert ret == 'python3.12' - - -def test_default_version_picks_pypy_versioned_exe(): - ret = _get_default_version( - impl='pypy', - exe='python', - found={'pypy3.12', 'python3'}, - version=(3, 12), - ) - assert ret == 'pypy3.12' - - -def test_default_version_picks_pypy_unversioned_exe(): - ret = _get_default_version( - impl='pypy', - exe='python', - found={'pypy3', 'python3'}, - version=(3, 12), - ) - assert ret == 'pypy3' - - def test_norm_version_expanduser(): home = os.path.expanduser('~') if sys.platform == 'win32': # pragma: win32 cover @@ -353,15 +284,3 @@ def test_python_hook_weird_setup_cfg(tmp_path): ret = run_language(tmp_path, python, 'socks', [os.devnull]) assert ret == (0, f'[{os.devnull!r}]\nhello hello\n'.encode()) - - -def test_local_repo_with_other_artifacts(tmp_path): - cmd_output_b('git', 'init', tmp_path) - _make_local_repo(str(tmp_path)) - # pretend a rust install also ran 
here - tmp_path.joinpath('target').mkdir() - - ret, out = run_language(tmp_path, python, 'python --version') - - assert ret == 0 - assert out.startswith(b'Python ') diff --git a/tests/languages/r_test.py b/tests/languages/r_test.py index 9e73129e..02c559cb 100644 --- a/tests/languages/r_test.py +++ b/tests/languages/r_test.py @@ -1,17 +1,14 @@ from __future__ import annotations import os.path -from unittest import mock +import shutil import pytest -import pre_commit.constants as C from pre_commit import envcontext -from pre_commit import lang_base from pre_commit.languages import r from pre_commit.prefix import Prefix from pre_commit.store import _make_local_repo -from pre_commit.util import resource_text from pre_commit.util import win_exe from testing.language_helpers import run_language @@ -130,8 +127,7 @@ def test_path_rscript_exec_no_r_home_set(): assert r._rscript_exec() == 'Rscript' -@pytest.fixture -def renv_lock_file(tmp_path): +def test_r_hook(tmp_path): renv_lock = '''\ { "R": { @@ -161,12 +157,6 @@ def renv_lock_file(tmp_path): } } ''' - tmp_path.joinpath('renv.lock').write_text(renv_lock) - yield - - -@pytest.fixture -def description_file(tmp_path): description = '''\ Package: gli.clu Title: What the Package Does (One Line, Title Case) @@ -188,39 +178,27 @@ RoxygenNote: 7.1.1 Imports: rprojroot ''' - tmp_path.joinpath('DESCRIPTION').write_text(description) - yield - - -@pytest.fixture -def hello_world_file(tmp_path): - hello_world = '''\ + hello_world_r = '''\ stopifnot( packageVersion('rprojroot') == '1.0', packageVersion('gli.clu') == '0.0.0.9000' ) cat("Hello, World, from R!\n") ''' - tmp_path.joinpath('hello-world.R').write_text(hello_world) - yield - -@pytest.fixture -def renv_folder(tmp_path): + tmp_path.joinpath('renv.lock').write_text(renv_lock) + tmp_path.joinpath('DESCRIPTION').write_text(description) + tmp_path.joinpath('hello-world.R').write_text(hello_world_r) renv_dir = tmp_path.joinpath('renv') renv_dir.mkdir() - activate_r = resource_text('empty_template_activate.R') - renv_dir.joinpath('activate.R').write_text(activate_r) - yield + shutil.copy( + os.path.join( + os.path.dirname(__file__), + '../../pre_commit/resources/empty_template_activate.R', + ), + renv_dir.joinpath('activate.R'), + ) - -def test_r_hook( - tmp_path, - renv_lock_file, - description_file, - hello_world_file, - renv_folder, -): expected = (0, b'Hello, World, from R!\n') assert run_language(tmp_path, r, 'Rscript hello-world.R') == expected @@ -243,55 +221,3 @@ Rscript -e ' args=('hi', 'hello'), ) assert ret == (0, b'hi, hello, from R!\n') - - -@pytest.fixture -def prefix(tmpdir): - yield Prefix(str(tmpdir)) - - -@pytest.fixture -def installed_environment( - renv_lock_file, - hello_world_file, - renv_folder, - prefix, -): - env_dir = lang_base.environment_dir( - prefix, r.ENVIRONMENT_DIR, r.get_default_version(), - ) - r.install_environment(prefix, C.DEFAULT, ()) - yield prefix, env_dir - - -def test_health_check_healthy(installed_environment): - # should be healthy right after creation - prefix, _ = installed_environment - assert r.health_check(prefix, C.DEFAULT) is None - - -def test_health_check_after_downgrade(installed_environment): - prefix, _ = installed_environment - - # pretend the saved installed version is old - with mock.patch.object(r, '_read_installed_version', return_value='1.0.0'): - output = r.health_check(prefix, C.DEFAULT) - - assert output is not None - assert output.startswith('Hooks were installed for R version') - - -@pytest.mark.parametrize('version', ('NULL', 'NA', 
"''")) -def test_health_check_without_version(prefix, installed_environment, version): - prefix, env_dir = installed_environment - - # simulate old pre-commit install by unsetting the installed version - r._execute_r_in_renv( - f'renv::settings$r.version({version})', - prefix=prefix, version=C.DEFAULT, cwd=env_dir, - ) - - # no R version specified fails as unhealty - msg = 'Hooks were installed with an unknown R version' - check_output = r.health_check(prefix, C.DEFAULT) - assert check_output is not None and check_output.startswith(msg) diff --git a/tests/languages/ruby_test.py b/tests/languages/ruby_test.py index 5d767b25..6397a434 100644 --- a/tests/languages/ruby_test.py +++ b/tests/languages/ruby_test.py @@ -91,8 +91,8 @@ def test_ruby_additional_deps(tmp_path): tmp_path, ruby, 'ruby -e', - args=('require "jmespath"',), - deps=('jmespath',), + args=('require "tins"',), + deps=('tins',), ) assert ret == (0, b'') diff --git a/tests/languages/rust_test.py b/tests/languages/rust_test.py index 52e35613..5c17f5b6 100644 --- a/tests/languages/rust_test.py +++ b/tests/languages/rust_test.py @@ -9,7 +9,6 @@ from pre_commit import parse_shebang from pre_commit.languages import rust from pre_commit.store import _make_local_repo from testing.language_helpers import run_language -from testing.util import cwd ACTUAL_GET_DEFAULT_VERSION = rust.get_default_version.__wrapped__ @@ -30,14 +29,6 @@ def test_uses_default_when_rust_is_not_available(cmd_output_b_mck): assert ACTUAL_GET_DEFAULT_VERSION() == C.DEFAULT -def test_selects_system_even_if_rust_toolchain_toml(tmp_path): - toolchain_toml = '[toolchain]\nchannel = "wtf"\n' - tmp_path.joinpath('rust-toolchain.toml').write_text(toolchain_toml) - - with cwd(tmp_path): - assert ACTUAL_GET_DEFAULT_VERSION() == 'system' - - def _make_hello_world(tmp_path): src_dir = tmp_path.joinpath('src') src_dir.mkdir() diff --git a/tests/languages/unsupported_script_test.py b/tests/languages/script_test.py similarity index 63% rename from tests/languages/unsupported_script_test.py rename to tests/languages/script_test.py index b15b67e7..a02f615a 100644 --- a/tests/languages/unsupported_script_test.py +++ b/tests/languages/script_test.py @@ -1,14 +1,14 @@ from __future__ import annotations -from pre_commit.languages import unsupported_script +from pre_commit.languages import script from pre_commit.util import make_executable from testing.language_helpers import run_language -def test_unsupported_script_language(tmp_path): +def test_script_language(tmp_path): exe = tmp_path.joinpath('main') exe.write_text('#!/usr/bin/env bash\necho hello hello world\n') make_executable(exe) expected = (0, b'hello hello world\n') - assert run_language(tmp_path, unsupported_script, 'main') == expected + assert run_language(tmp_path, script, 'main') == expected diff --git a/tests/languages/system_test.py b/tests/languages/system_test.py new file mode 100644 index 00000000..dcd9cf1e --- /dev/null +++ b/tests/languages/system_test.py @@ -0,0 +1,9 @@ +from __future__ import annotations + +from pre_commit.languages import system +from testing.language_helpers import run_language + + +def test_system_language(tmp_path): + expected = (0, b'hello hello world\n') + assert run_language(tmp_path, system, 'echo hello hello world') == expected diff --git a/tests/languages/unsupported_test.py b/tests/languages/unsupported_test.py deleted file mode 100644 index 7f8461e0..00000000 --- a/tests/languages/unsupported_test.py +++ /dev/null @@ -1,10 +0,0 @@ -from __future__ import annotations - -from 
pre_commit.languages import unsupported -from testing.language_helpers import run_language - - -def test_unsupported_language(tmp_path): - expected = (0, b'hello hello world\n') - ret = run_language(tmp_path, unsupported, 'echo hello hello world') - assert ret == expected diff --git a/tests/main_test.py b/tests/main_test.py index fed085fc..945349fa 100644 --- a/tests/main_test.py +++ b/tests/main_test.py @@ -1,7 +1,6 @@ from __future__ import annotations import argparse -import contextlib import os.path from unittest import mock @@ -9,7 +8,6 @@ import pytest import pre_commit.constants as C from pre_commit import main -from pre_commit.commands import hazmat from pre_commit.errors import FatalError from pre_commit.util import cmd_output from testing.auto_namedtuple import auto_namedtuple @@ -99,9 +97,11 @@ CMDS = tuple(fn.replace('_', '-') for fn in FNS) @pytest.fixture def mock_commands(): - with contextlib.ExitStack() as ctx: - mcks = {f: ctx.enter_context(mock.patch.object(main, f)) for f in FNS} - yield auto_namedtuple(**mcks) + mcks = {fn: mock.patch.object(main, fn).start() for fn in FNS} + ret = auto_namedtuple(**mcks) + yield ret + for mck in ret: + mck.stop() @pytest.fixture @@ -158,17 +158,6 @@ def test_all_cmds(command, mock_commands, mock_store_dir): assert_only_one_mock_called(mock_commands) -def test_hazmat(mock_store_dir): - with mock.patch.object(hazmat, 'impl') as mck: - main.main(('hazmat', 'cd', 'subdir', '--', 'cmd', '--', 'f1', 'f2')) - assert mck.call_count == 1 - (arg,), dct = mck.call_args - assert dct == {} - assert arg.tool == 'cd' - assert arg.subdir == 'subdir' - assert arg.cmd == ['cmd', '--', 'f1', 'f2'] - - def test_try_repo(mock_store_dir): with mock.patch.object(main, 'try_repo') as patch: main.main(('try-repo', '.')) diff --git a/tests/parse_shebang_test.py b/tests/parse_shebang_test.py index bd4384df..dd97ca5d 100644 --- a/tests/parse_shebang_test.py +++ b/tests/parse_shebang_test.py @@ -133,17 +133,17 @@ def test_normalize_cmd_PATH(): def test_normalize_cmd_shebang(in_tmpdir): - us = sys.executable.replace(os.sep, '/') - path = write_executable(us) - assert parse_shebang.normalize_cmd((path,)) == (us, path) + echo = _echo_exe().replace(os.sep, '/') + path = write_executable(echo) + assert parse_shebang.normalize_cmd((path,)) == (echo, path) def test_normalize_cmd_PATH_shebang_full_path(in_tmpdir): - us = sys.executable.replace(os.sep, '/') - path = write_executable(us) + echo = _echo_exe().replace(os.sep, '/') + path = write_executable(echo) with bin_on_path(): ret = parse_shebang.normalize_cmd(('run',)) - assert ret == (us, os.path.abspath(path)) + assert ret == (echo, os.path.abspath(path)) def test_normalize_cmd_PATH_shebang_PATH(in_tmpdir): diff --git a/tests/repository_test.py b/tests/repository_test.py index 5d71c3e4..b8dde99b 100644 --- a/tests/repository_test.py +++ b/tests/repository_test.py @@ -9,6 +9,7 @@ from unittest import mock import cfgv import pytest +import re_assert import pre_commit.constants as C from pre_commit import lang_base @@ -17,7 +18,7 @@ from pre_commit.clientlib import CONFIG_SCHEMA from pre_commit.clientlib import load_manifest from pre_commit.hook import Hook from pre_commit.languages import python -from pre_commit.languages import unsupported +from pre_commit.languages import system from pre_commit.prefix import Prefix from pre_commit.repository import _hook_installed from pre_commit.repository import all_hooks @@ -26,6 +27,7 @@ from pre_commit.util import cmd_output from pre_commit.util import cmd_output_b from 
testing.fixtures import make_config_from_repo from testing.fixtures import make_repo +from testing.fixtures import modify_manifest from testing.language_helpers import run_language from testing.util import cwd from testing.util import get_resource_path @@ -80,6 +82,31 @@ def _test_hook_repo( assert out == expected +def test_python_venv_deprecation(store, caplog): + config = { + 'repo': 'local', + 'hooks': [{ + 'id': 'example', + 'name': 'example', + 'language': 'python_venv', + 'entry': 'echo hi', + }], + } + _get_hook(config, store, 'example') + assert caplog.messages[-1] == ( + '`repo: local` uses deprecated `language: python_venv`. ' + 'This is an alias for `language: python`. ' + 'Often `pre-commit autoupdate --repo local` will fix this.' + ) + + +def test_system_hook_with_spaces(tempdir_factory, store): + _test_hook_repo( + tempdir_factory, store, 'system_hook_with_spaces_repo', + 'system-hook-with-spaces', [os.devnull], b'Hello World\n', + ) + + def test_missing_executable(tempdir_factory, store): _test_hook_repo( tempdir_factory, store, 'not_found_exe', @@ -215,16 +242,16 @@ def test_unknown_keys(store, caplog): assert msg == 'Unexpected key(s) present on local => too-much: foo, hello' -def test_reinstall(tempdir_factory, store, caplog): +def test_reinstall(tempdir_factory, store, log_info_mock): path = make_repo(tempdir_factory, 'python_hooks_repo') config = make_config_from_repo(path) _get_hook(config, store, 'foo') # We print some logging during clone (1) + install (3) - assert len(caplog.record_tuples) == 4 - caplog.clear() + assert log_info_mock.call_count == 4 + log_info_mock.reset_mock() # Reinstall on another run should not trigger another install _get_hook(config, store, 'foo') - assert len(caplog.record_tuples) == 0 + assert log_info_mock.call_count == 0 def test_control_c_control_c_on_install(tempdir_factory, store): @@ -406,6 +433,32 @@ def test_hook_id_not_present(tempdir_factory, store, caplog): ) +def test_too_new_version(tempdir_factory, store, caplog): + path = make_repo(tempdir_factory, 'script_hooks_repo') + with modify_manifest(path) as manifest: + manifest[0]['minimum_pre_commit_version'] = '999.0.0' + config = make_config_from_repo(path) + with pytest.raises(SystemExit): + _get_hook(config, store, 'bash_hook') + _, msg = caplog.messages + pattern = re_assert.Matches( + r'^The hook `bash_hook` requires pre-commit version 999\.0\.0 but ' + r'version \d+\.\d+\.\d+ is installed. 
' + r'Perhaps run `pip install --upgrade pre-commit`\.$', + ) + pattern.assert_matches(msg) + + +@pytest.mark.parametrize('version', ('0.1.0', C.VERSION)) +def test_versions_ok(tempdir_factory, store, version): + path = make_repo(tempdir_factory, 'script_hooks_repo') + with modify_manifest(path) as manifest: + manifest[0]['minimum_pre_commit_version'] = version + config = make_config_from_repo(path) + # Should succeed + _get_hook(config, store, 'bash_hook') + + def test_manifest_hooks(tempdir_factory, store): path = make_repo(tempdir_factory, 'script_hooks_repo') config = make_config_from_repo(path) @@ -424,7 +477,7 @@ def test_manifest_hooks(tempdir_factory, store): exclude_types=[], files='', id='bash_hook', - language='unsupported_script', + language='script', language_version='default', log_file='', minimum_pre_commit_version='0', @@ -457,7 +510,7 @@ def test_non_installable_hook_error_for_language_version(store, caplog): 'hooks': [{ 'id': 'system-hook', 'name': 'system-hook', - 'language': 'unsupported', + 'language': 'system', 'entry': 'python3 -c "import sys; print(sys.version)"', 'language_version': 'python3.10', }], @@ -469,7 +522,7 @@ def test_non_installable_hook_error_for_language_version(store, caplog): msg, = caplog.messages assert msg == ( 'The hook `system-hook` specifies `language_version` but is using ' - 'language `unsupported` which does not install an environment. ' + 'language `system` which does not install an environment. ' 'Perhaps you meant to use a specific language?' ) @@ -480,7 +533,7 @@ def test_non_installable_hook_error_for_additional_dependencies(store, caplog): 'hooks': [{ 'id': 'system-hook', 'name': 'system-hook', - 'language': 'unsupported', + 'language': 'system', 'entry': 'python3 -c "import sys; print(sys.version)"', 'additional_dependencies': ['astpretty'], }], @@ -492,28 +545,17 @@ def test_non_installable_hook_error_for_additional_dependencies(store, caplog): msg, = caplog.messages assert msg == ( 'The hook `system-hook` specifies `additional_dependencies` but is ' - 'using language `unsupported` which does not install an environment. ' + 'using language `system` which does not install an environment. ' 'Perhaps you meant to use a specific language?' ) def test_args_with_spaces_and_quotes(tmp_path): ret = run_language( - tmp_path, unsupported, + tmp_path, system, f"{shlex.quote(sys.executable)} -c 'import sys; print(sys.argv[1:])'", ('i have spaces', 'and"\'quotes', '$and !this'), ) expected = b"['i have spaces', 'and\"\\'quotes', '$and !this']\n" assert ret == (0, expected) - - -def test_hazmat(tmp_path): - ret = run_language( - tmp_path, unsupported, - f'pre-commit hazmat ignore-exit-code {shlex.quote(sys.executable)} ' - f"-c 'import sys; raise SystemExit(sys.argv[1:])'", - ('f1', 'f2'), - ) - expected = b"['f1', 'f2']\n" - assert ret == (0, expected) diff --git a/tests/staged_files_only_test.py b/tests/staged_files_only_test.py index cd2f6387..58dbe5ac 100644 --- a/tests/staged_files_only_test.py +++ b/tests/staged_files_only_test.py @@ -358,21 +358,6 @@ def test_crlf(in_git_dir, patch_dir, crlf_before, crlf_after, autocrlf): assert_no_diff() -@pytest.mark.parametrize('autocrlf', ('true', 'input')) -def test_crlf_diff_only(in_git_dir, patch_dir, autocrlf): - # due to a quirk (?) 
in git -- a diff only in crlf does not show but - # still results in an exit code of `1` - # we treat this as "no diff" -- though ideally it would discard the diff - # while committing - cmd_output('git', 'config', '--local', 'core.autocrlf', autocrlf) - - _write(b'1\r\n2\r\n3\r\n') - cmd_output('git', 'add', 'foo') - _write(b'1\n2\n3\n') - with staged_files_only(patch_dir): - pass - - def test_whitespace_errors(in_git_dir, patch_dir): cmd_output('git', 'config', '--local', 'apply.whitespace', 'error') test_crlf(in_git_dir, patch_dir, True, True, 'true') diff --git a/tests/store_test.py b/tests/store_test.py index 13f198ea..eaab9400 100644 --- a/tests/store_test.py +++ b/tests/store_test.py @@ -1,15 +1,12 @@ from __future__ import annotations -import logging import os.path -import shlex import sqlite3 import stat from unittest import mock import pytest -import pre_commit.constants as C from pre_commit import git from pre_commit.store import _get_default_directory from pre_commit.store import _LOCAL_RESOURCES @@ -22,17 +19,6 @@ from testing.util import git_commit from testing.util import xfailif_windows -def _select_all_configs(store: Store) -> list[str]: - with store.connect() as db: - rows = db.execute('SELECT * FROM configs').fetchall() - return [path for path, in rows] - - -def _select_all_repos(store: Store) -> list[tuple[str, str, str]]: - with store.connect() as db: - return db.execute('SELECT repo, ref, path FROM repos').fetchall() - - def test_our_session_fixture_works(): """There's a session fixture which makes `Store` invariantly raise to prevent writing to the home directory. @@ -79,7 +65,7 @@ def test_store_init(store): assert text_line in readme_contents -def test_clone(store, tempdir_factory, caplog): +def test_clone(store, tempdir_factory, log_info_mock): path = git_dir(tempdir_factory) with cwd(path): git_commit() @@ -88,7 +74,7 @@ def test_clone(store, tempdir_factory, caplog): ret = store.clone(path, rev) # Should have printed some stuff - assert caplog.record_tuples[0][-1].startswith( + assert log_info_mock.call_args_list[0][0][0].startswith( 'Initializing environment for ', ) @@ -102,73 +88,7 @@ def test_clone(store, tempdir_factory, caplog): assert git.head_rev(ret) == rev # Assert there's an entry in the sqlite db for this - assert _select_all_repos(store) == [(path, rev, ret)] - - -def test_warning_for_deprecated_stages_on_init(store, tempdir_factory, caplog): - manifest = '''\ -- id: hook1 - name: hook1 - language: system - entry: echo hook1 - stages: [commit, push] -- id: hook2 - name: hook2 - language: system - entry: echo hook2 - stages: [push, merge-commit] -''' - - path = git_dir(tempdir_factory) - with open(os.path.join(path, C.MANIFEST_FILE), 'w') as f: - f.write(manifest) - cmd_output('git', 'add', '.', cwd=path) - git_commit(cwd=path) - rev = git.head_rev(path) - - store.clone(path, rev) - assert caplog.record_tuples[1] == ( - 'pre_commit', - logging.WARNING, - f'repo `{path}` uses deprecated stage names ' - f'(commit, push, merge-commit) which will be removed in a future ' - f'version. ' - f'Hint: often `pre-commit autoupdate --repo {shlex.quote(path)}` ' - f'will fix this. 
' - f'if it does not -- consider reporting an issue to that repo.', - ) - - # should not re-warn - caplog.clear() - store.clone(path, rev) - assert caplog.record_tuples == [] - - -def test_no_warning_for_non_deprecated_stages_on_init( - store, tempdir_factory, caplog, -): - manifest = '''\ -- id: hook1 - name: hook1 - language: system - entry: echo hook1 - stages: [pre-commit, pre-push] -- id: hook2 - name: hook2 - language: system - entry: echo hook2 - stages: [pre-push, pre-merge-commit] -''' - - path = git_dir(tempdir_factory) - with open(os.path.join(path, C.MANIFEST_FILE), 'w') as f: - f.write(manifest) - cmd_output('git', 'add', '.', cwd=path) - git_commit(cwd=path) - rev = git.head_rev(path) - - store.clone(path, rev) - assert logging.WARNING not in {tup[1] for tup in caplog.record_tuples} + assert store.select_all_repos() == [(path, rev, ret)] def test_clone_cleans_up_on_checkout_failure(store): @@ -198,7 +118,7 @@ def test_clone_when_repo_already_exists(store): def test_clone_shallow_failure_fallback_to_complete( store, tempdir_factory, - caplog, + log_info_mock, ): path = git_dir(tempdir_factory) with cwd(path): @@ -214,7 +134,7 @@ def test_clone_shallow_failure_fallback_to_complete( ret = store.clone(path, rev) # Should have printed some stuff - assert caplog.record_tuples[0][-1].startswith( + assert log_info_mock.call_args_list[0][0][0].startswith( 'Initializing environment for ', ) @@ -228,7 +148,7 @@ def test_clone_shallow_failure_fallback_to_complete( assert git.head_rev(ret) == rev # Assert there's an entry in the sqlite db for this - assert _select_all_repos(store) == [(path, rev, ret)] + assert store.select_all_repos() == [(path, rev, ret)] def test_clone_tag_not_on_mainline(store, tempdir_factory): @@ -265,7 +185,7 @@ def test_db_repo_name(store): def test_local_resources_reflects_reality(): on_disk = { - res.removeprefix('empty_template_') + res[len('empty_template_'):] for res in os.listdir('pre_commit/resources') if res.startswith('empty_template_') } @@ -276,7 +196,7 @@ def test_mark_config_as_used(store, tmpdir): with tmpdir.as_cwd(): f = tmpdir.join('f').ensure() store.mark_config_used('f') - assert _select_all_configs(store) == [f.strpath] + assert store.select_all_configs() == [f.strpath] def test_mark_config_as_used_idempotent(store, tmpdir): @@ -286,12 +206,21 @@ def test_mark_config_as_used_idempotent(store, tmpdir): def test_mark_config_as_used_does_not_exist(store): store.mark_config_used('f') - assert _select_all_configs(store) == [] + assert store.select_all_configs() == [] + + +def _simulate_pre_1_14_0(store): + with store.connect() as db: + db.executescript('DROP TABLE configs') + + +def test_select_all_configs_roll_forward(store): + _simulate_pre_1_14_0(store) + assert store.select_all_configs() == [] def test_mark_config_as_used_roll_forward(store, tmpdir): - with store.connect() as db: # simulate pre-1.14.0 - db.executescript('DROP TABLE configs') + _simulate_pre_1_14_0(store) test_mark_config_as_used(store, tmpdir) @@ -316,7 +245,7 @@ def test_mark_config_as_used_readonly(tmpdir): assert store.readonly # should be skipped due to readonly store.mark_config_used(str(cfg)) - assert _select_all_configs(store) == [] + assert store.select_all_configs() == [] def test_clone_with_recursive_submodules(store, tmp_path): diff --git a/tests/xargs_test.py b/tests/xargs_test.py index e8000b25..7c41f98c 100644 --- a/tests/xargs_test.py +++ b/tests/xargs_test.py @@ -1,7 +1,6 @@ from __future__ import annotations import concurrent.futures -import multiprocessing 
import os import sys import time @@ -13,40 +12,6 @@ from pre_commit import parse_shebang from pre_commit import xargs -def test_cpu_count_sched_getaffinity_exists(): - with mock.patch.object( - os, 'sched_getaffinity', create=True, return_value=set(range(345)), - ): - assert xargs.cpu_count() == 345 - - -@pytest.fixture -def no_sched_getaffinity(): - # Simulates an OS without os.sched_getaffinity available (mac/windows) - # https://docs.python.org/3/library/os.html#interface-to-the-scheduler - with mock.patch.object( - os, - 'sched_getaffinity', - create=True, - side_effect=AttributeError, - ): - yield - - -def test_cpu_count_multiprocessing_cpu_count_implemented(no_sched_getaffinity): - with mock.patch.object(multiprocessing, 'cpu_count', return_value=123): - assert xargs.cpu_count() == 123 - - -def test_cpu_count_multiprocessing_cpu_count_not_implemented( - no_sched_getaffinity, -): - with mock.patch.object( - multiprocessing, 'cpu_count', side_effect=NotImplementedError, - ): - assert xargs.cpu_count() == 1 - - @pytest.mark.parametrize( ('env', 'expected'), ( @@ -182,15 +147,6 @@ def test_xargs_retcode_normal(): assert ret == 5 -@pytest.mark.xfail(sys.platform == 'win32', reason='posix only') -def test_xargs_retcode_killed_by_signal(): - ret, _ = xargs.xargs( - parse_shebang.normalize_cmd(('bash', '-c', 'kill -9 $$', '--')), - ('foo', 'bar'), - ) - assert ret == -9 - - def test_xargs_concurrency(): bash_cmd = parse_shebang.normalize_cmd(('bash', '-c')) print_pid = ('sleep 0.5 && echo $$',) diff --git a/tests/yaml_rewrite_test.py b/tests/yaml_rewrite_test.py deleted file mode 100644 index d0f6841c..00000000 --- a/tests/yaml_rewrite_test.py +++ /dev/null @@ -1,47 +0,0 @@ -from __future__ import annotations - -import pytest - -from pre_commit.yaml import yaml_compose -from pre_commit.yaml_rewrite import MappingKey -from pre_commit.yaml_rewrite import MappingValue -from pre_commit.yaml_rewrite import match -from pre_commit.yaml_rewrite import SequenceItem - - -def test_match_produces_scalar_values_only(): - src = '''\ -- name: foo -- name: [not, foo] # not a scalar: should be skipped! -- name: bar -''' - matcher = (SequenceItem(), MappingValue('name')) - ret = [n.value for n in match(yaml_compose(src), matcher)] - assert ret == ['foo', 'bar'] - - -@pytest.mark.parametrize('cls', (MappingKey, MappingValue)) -def test_mapping_not_a_map(cls): - m = cls('s') - assert list(m.match(yaml_compose('[foo]'))) == [] - - -def test_sequence_item_not_a_sequence(): - assert list(SequenceItem().match(yaml_compose('s: val'))) == [] - - -def test_mapping_key(): - m = MappingKey('s') - ret = [n.value for n in m.match(yaml_compose('s: val\nt: val2'))] - assert ret == ['s'] - - -def test_mapping_value(): - m = MappingValue('s') - ret = [n.value for n in m.match(yaml_compose('s: val\nt: val2'))] - assert ret == ['val'] - - -def test_sequence_item(): - ret = [n.value for n in SequenceItem().match(yaml_compose('[a, b, c]'))] - assert ret == ['a', 'b', 'c']