diff --git a/.github/workflows/languages.yaml b/.github/workflows/languages.yaml index 7d50535f..be8963ba 100644 --- a/.github/workflows/languages.yaml +++ b/.github/workflows/languages.yaml @@ -21,7 +21,7 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@v4 with: - python-version: 3.9 + python-version: '3.10' - name: install deps run: python -mpip install -e . -r requirements-dev.txt - name: vars @@ -36,10 +36,10 @@ jobs: matrix: include: ${{ fromJSON(needs.vars.outputs.languages) }} steps: - - uses: asottile/workflows/.github/actions/fast-checkout@v1.4.0 + - uses: asottile/workflows/.github/actions/fast-checkout@v1.8.1 - uses: actions/setup-python@v4 with: - python-version: 3.9 + python-version: '3.10' - run: echo "$CONDA\Scripts" >> "$GITHUB_PATH" shell: bash @@ -65,6 +65,8 @@ jobs: if: matrix.os == 'windows-latest' && matrix.language == 'perl' - uses: haskell/actions/setup@v2 if: matrix.language == 'haskell' + - uses: r-lib/actions/setup-r@v2 + if: matrix.os == 'ubuntu-latest' && matrix.language == 'r' - name: install deps run: python -mpip install -e . -r requirements-dev.txt diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 2355b662..02b11ae2 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -12,12 +12,12 @@ concurrency: jobs: main-windows: - uses: asottile/workflows/.github/workflows/tox.yml@v1.6.0 + uses: asottile/workflows/.github/workflows/tox.yml@v1.8.1 with: - env: '["py39"]' + env: '["py310"]' os: windows-latest main-linux: - uses: asottile/workflows/.github/workflows/tox.yml@v1.6.0 + uses: asottile/workflows/.github/workflows/tox.yml@v1.8.1 with: - env: '["py39", "py310", "py311", "py312"]' + env: '["py310", "py311", "py312", "py313"]' os: ubuntu-latest diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1a9a8fef..3654066f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v6.0.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer @@ -10,34 +10,34 @@ repos: - id: name-tests-test - id: requirements-txt-fixer - repo: https://github.com/asottile/setup-cfg-fmt - rev: v2.5.0 + rev: v3.2.0 hooks: - id: setup-cfg-fmt - repo: https://github.com/asottile/reorder-python-imports - rev: v3.13.0 + rev: v3.16.0 hooks: - id: reorder-python-imports - exclude: ^(pre_commit/resources/|testing/resources/python3_hooks_repo/) - args: [--py39-plus, --add-import, 'from __future__ import annotations'] + exclude: ^pre_commit/resources/ + args: [--py310-plus, --add-import, 'from __future__ import annotations'] - repo: https://github.com/asottile/add-trailing-comma - rev: v3.1.0 + rev: v4.0.0 hooks: - id: add-trailing-comma - repo: https://github.com/asottile/pyupgrade - rev: v3.16.0 + rev: v3.21.2 hooks: - id: pyupgrade - args: [--py39-plus] + args: [--py310-plus] - repo: https://github.com/hhatto/autopep8 - rev: v2.3.1 + rev: v2.3.2 hooks: - id: autopep8 - repo: https://github.com/PyCQA/flake8 - rev: 7.1.0 + rev: 7.3.0 hooks: - id: flake8 - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.11.0 + rev: v1.19.1 hooks: - id: mypy additional_dependencies: [types-pyyaml] diff --git a/CHANGELOG.md b/CHANGELOG.md index 49094bbb..879ae073 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,119 @@ +4.5.1 - 2025-12-16 +================== + +### Fixes +- Fix `language: python` with `repo: local` without `additional_dependencies`. + - #3597 PR by @asottile. 
+ +4.5.0 - 2025-11-22 +================== + +### Features +- Add `pre-commit hazmat`. + - #3585 PR by @asottile. + +4.4.0 - 2025-11-08 +================== + +### Features +- Add `--fail-fast` option to `pre-commit run`. + - #3528 PR by @JulianMaurin. +- Upgrade `ruby-build` / `rbenv`. + - #3566 PR by @asottile. + - #3565 issue by @MRigal. +- Add `language: unsupported` / `language: unsupported_script` as aliases + for `language: system` / `language: script` (which will eventually be + deprecated). + - #3577 PR by @asottile. +- Add support docker-in-docker detection for cgroups v2. + - #3535 PR by @br-rhrbacek. + - #3360 issue by @JasonAlt. + +### Fixes +- Handle when docker gives `SecurityOptions: null`. + - #3537 PR by @asottile. + - #3514 issue by @jenstroeger. +- Fix error context for invalid `stages` in `.pre-commit-config.yaml`. + - #3576 PR by @asottile. + +4.3.0 - 2025-08-09 +================== + +### Features +- `language: docker` / `language: docker_image`: detect rootless docker. + - #3446 PR by @matthewhughes934. + - #1243 issue by @dkolepp. +- `language: julia`: avoid `startup.jl` when executing hooks. + - #3496 PR by @ericphanson. +- `language: dart`: support latest dart versions which require a higher sdk + lower bound. + - #3507 PR by @bc-lee. + +4.2.0 - 2025-03-18 +================== + +### Features +- For `language: python` first attempt a versioned python executable for + the default language version before consulting a potentially unversioned + `sys.executable`. + - #3430 PR by @asottile. + +### Fixes +- Handle error during conflict detection when a file is named "HEAD" + - #3425 PR by @tusharsadhwani. + +4.1.0 - 2025-01-20 +================== + +### Features +- Add `language: julia`. + - #3348 PR by @fredrikekre. + - #2689 issue @jmuchovej. + +### Fixes +- Disable automatic toolchain switching for `language: golang`. + - #3304 PR by @AleksaC. + - #3300 issue by @AleksaC. + - #3149 issue by @nijel. +- Fix `language: r` installation when initiated by RStudio. + - #3389 PR by @lorenzwalthert. + - #3385 issue by @lorenzwalthert. + + +4.0.1 - 2024-10-08 +================== + +### Fixes +- Fix `pre-commit migrate-config` for unquoted deprecated stages names with + purelib `pyyaml`. + - #3324 PR by @asottile. + - pre-commit-ci/issues#234 issue by @lorenzwalthert. + +4.0.0 - 2024-10-05 +================== + +### Features +- Improve `pre-commit migrate-config` to handle more yaml formats. + - #3301 PR by @asottile. +- Handle `stages` deprecation in `pre-commit migrate-config`. + - #3302 PR by @asottile. + - #2732 issue by @asottile. +- Upgrade `ruby-build`. + - #3199 PR by @ThisGuyCodes. +- Add "sensible regex" warnings to `repo: meta`. + - #3311 PR by @asottile. +- Add warnings for deprecated `stages` (`commit` -> `pre-commit`, `push` -> + `pre-push`, `merge-commit` -> `pre-merge-commit`). + - #3312 PR by @asottile. + - #3313 PR by @asottile. + - #3315 PR by @asottile. + - #2732 issue by @asottile. + +### Updating +- `language: python_venv` has been removed -- use `language: python` instead. + - #3320 PR by @asottile. + - #2734 issue by @asottile. + 3.8.0 - 2024-07-28 ================== @@ -81,7 +197,7 @@ - Use `time.monotonic()` for more accurate hook timing. - #3024 PR by @adamchainz. -### Migrating +### Updating - Require npm 6.x+ for `language: node` hooks. - #2996 PR by @RoelAdriaans. - #1983 issue by @henryiii. 
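A note on the stage-name deprecation called out in the 4.0.0 changelog entries above: the renames that `pre-commit migrate-config` applies are `commit -> pre-commit`, `push -> pre-push`, and `merge-commit -> pre-merge-commit`. The sketch below is illustrative only (it is not the project's code verbatim) and shows the mapping those entries describe:

```python
# Illustrative sketch of the stage renames described in the changelog above;
# the helper name and mapping constant here are mine, not the project's.
LEGACY_STAGES = {
    'commit': 'pre-commit',
    'push': 'pre-push',
    'merge-commit': 'pre-merge-commit',
}


def migrate_stage(stage: str) -> str:
    """Map a deprecated stage name to its replacement; pass others through."""
    return LEGACY_STAGES.get(stage, stage)


assert migrate_stage('commit') == 'pre-commit'
assert migrate_stage('pre-push') == 'pre-push'  # already modern, unchanged
```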
diff --git a/pre_commit/all_languages.py b/pre_commit/all_languages.py index 476bad9d..166bc167 100644 --- a/pre_commit/all_languages.py +++ b/pre_commit/all_languages.py @@ -10,6 +10,7 @@ from pre_commit.languages import dotnet from pre_commit.languages import fail from pre_commit.languages import golang from pre_commit.languages import haskell +from pre_commit.languages import julia from pre_commit.languages import lua from pre_commit.languages import node from pre_commit.languages import perl @@ -18,9 +19,9 @@ from pre_commit.languages import python from pre_commit.languages import r from pre_commit.languages import ruby from pre_commit.languages import rust -from pre_commit.languages import script from pre_commit.languages import swift -from pre_commit.languages import system +from pre_commit.languages import unsupported +from pre_commit.languages import unsupported_script languages: dict[str, Language] = { @@ -33,6 +34,7 @@ languages: dict[str, Language] = { 'fail': fail, 'golang': golang, 'haskell': haskell, + 'julia': julia, 'lua': lua, 'node': node, 'perl': perl, @@ -41,10 +43,8 @@ languages: dict[str, Language] = { 'r': r, 'ruby': ruby, 'rust': rust, - 'script': script, 'swift': swift, - 'system': system, - # TODO: fully deprecate `python_venv` - 'python_venv': python, + 'unsupported': unsupported, + 'unsupported_script': unsupported_script, } language_names = sorted(languages) diff --git a/pre_commit/clientlib.py b/pre_commit/clientlib.py index a49465e8..51f14d26 100644 --- a/pre_commit/clientlib.py +++ b/pre_commit/clientlib.py @@ -2,9 +2,11 @@ from __future__ import annotations import functools import logging +import os.path import re import shlex import sys +from collections.abc import Callable from collections.abc import Sequence from typing import Any from typing import NamedTuple @@ -70,6 +72,43 @@ def transform_stage(stage: str) -> str: return _STAGES.get(stage, stage) +MINIMAL_MANIFEST_SCHEMA = cfgv.Array( + cfgv.Map( + 'Hook', 'id', + cfgv.Required('id', cfgv.check_string), + cfgv.Optional('stages', cfgv.check_array(cfgv.check_string), []), + ), +) + + +def warn_for_stages_on_repo_init(repo: str, directory: str) -> None: + try: + manifest = cfgv.load_from_filename( + os.path.join(directory, C.MANIFEST_FILE), + schema=MINIMAL_MANIFEST_SCHEMA, + load_strategy=yaml_load, + exc_tp=InvalidManifestError, + ) + except InvalidManifestError: + return # they'll get a better error message when it actually loads! + + legacy_stages = {} # sorted set + for hook in manifest: + for stage in hook.get('stages', ()): + if stage in _STAGES: + legacy_stages[stage] = True + + if legacy_stages: + logger.warning( + f'repo `{repo}` uses deprecated stage names ' + f'({", ".join(legacy_stages)}) which will be removed in a ' + f'future version. ' + f'Hint: often `pre-commit autoupdate --repo {shlex.quote(repo)}` ' + f'will fix this. 
' + f'if it does not -- consider reporting an issue to that repo.', + ) + + class StagesMigrationNoDefault(NamedTuple): key: str default: Sequence[str] @@ -78,11 +117,12 @@ class StagesMigrationNoDefault(NamedTuple): if self.key not in dct: return - val = dct[self.key] - cfgv.check_array(cfgv.check_any)(val) + with cfgv.validate_context(f'At key: {self.key}'): + val = dct[self.key] + cfgv.check_array(cfgv.check_any)(val) - val = [transform_stage(v) for v in val] - cfgv.check_array(cfgv.check_one_of(STAGES))(val) + val = [transform_stage(v) for v in val] + cfgv.check_array(cfgv.check_one_of(STAGES))(val) def apply_default(self, dct: dict[str, Any]) -> None: if self.key not in dct: @@ -99,6 +139,94 @@ class StagesMigration(StagesMigrationNoDefault): super().apply_default(dct) +class DeprecatedStagesWarning(NamedTuple): + key: str + + def check(self, dct: dict[str, Any]) -> None: + if self.key not in dct: + return + + val = dct[self.key] + cfgv.check_array(cfgv.check_any)(val) + + legacy_stages = [stage for stage in val if stage in _STAGES] + if legacy_stages: + logger.warning( + f'hook id `{dct["id"]}` uses deprecated stage names ' + f'({", ".join(legacy_stages)}) which will be removed in a ' + f'future version. ' + f'run: `pre-commit migrate-config` to automatically fix this.', + ) + + def apply_default(self, dct: dict[str, Any]) -> None: + pass + + def remove_default(self, dct: dict[str, Any]) -> None: + raise NotImplementedError + + +class DeprecatedDefaultStagesWarning(NamedTuple): + key: str + + def check(self, dct: dict[str, Any]) -> None: + if self.key not in dct: + return + + val = dct[self.key] + cfgv.check_array(cfgv.check_any)(val) + + legacy_stages = [stage for stage in val if stage in _STAGES] + if legacy_stages: + logger.warning( + f'top-level `default_stages` uses deprecated stage names ' + f'({", ".join(legacy_stages)}) which will be removed in a ' + f'future version. 
' + f'run: `pre-commit migrate-config` to automatically fix this.', + ) + + def apply_default(self, dct: dict[str, Any]) -> None: + pass + + def remove_default(self, dct: dict[str, Any]) -> None: + raise NotImplementedError + + +def _translate_language(name: str) -> str: + return { + 'system': 'unsupported', + 'script': 'unsupported_script', + }.get(name, name) + + +class LanguageMigration(NamedTuple): # remove + key: str + check_fn: Callable[[object], None] + + def check(self, dct: dict[str, Any]) -> None: + if self.key not in dct: + return + + with cfgv.validate_context(f'At key: {self.key}'): + self.check_fn(_translate_language(dct[self.key])) + + def apply_default(self, dct: dict[str, Any]) -> None: + if self.key not in dct: + return + + dct[self.key] = _translate_language(dct[self.key]) + + def remove_default(self, dct: dict[str, Any]) -> None: + raise NotImplementedError + + +class LanguageMigrationRequired(LanguageMigration): # replace with Required + def check(self, dct: dict[str, Any]) -> None: + if self.key not in dct: + raise cfgv.ValidationError(f'Missing required key: {self.key}') + + super().check(dct) + + MANIFEST_HOOK_DICT = cfgv.Map( 'Hook', 'id', @@ -112,7 +240,7 @@ MANIFEST_HOOK_DICT = cfgv.Map( cfgv.Required('id', cfgv.check_string), cfgv.Required('name', cfgv.check_string), cfgv.Required('entry', cfgv.check_string), - cfgv.Required('language', cfgv.check_one_of(language_names)), + LanguageMigrationRequired('language', cfgv.check_one_of(language_names)), cfgv.Optional('alias', cfgv.check_string, ''), cfgv.Optional('files', check_string_regex, ''), @@ -142,10 +270,19 @@ class InvalidManifestError(FatalError): pass +def _load_manifest_forward_compat(contents: str) -> object: + obj = yaml_load(contents) + if isinstance(obj, dict): + check_min_version('5') + raise AssertionError('unreachable') + else: + return obj + + load_manifest = functools.partial( cfgv.load_from_filename, schema=MANIFEST_SCHEMA, - load_strategy=yaml_load, + load_strategy=_load_manifest_forward_compat, exc_tp=InvalidManifestError, ) @@ -267,12 +404,20 @@ class NotAllowed(cfgv.OptionalNoDefault): raise cfgv.ValidationError(f'{self.key!r} cannot be overridden') +_COMMON_HOOK_WARNINGS = ( + OptionalSensibleRegexAtHook('files', cfgv.check_string), + OptionalSensibleRegexAtHook('exclude', cfgv.check_string), + DeprecatedStagesWarning('stages'), +) + META_HOOK_DICT = cfgv.Map( 'Hook', 'id', cfgv.Required('id', cfgv.check_string), cfgv.Required('id', cfgv.check_one_of(tuple(k for k, _ in _meta))), - # language must be system - cfgv.Optional('language', cfgv.check_one_of({'system'}), 'system'), + # language must be `unsupported` + cfgv.Optional( + 'language', cfgv.check_one_of({'unsupported'}), 'unsupported', + ), # entry cannot be overridden NotAllowed('entry', cfgv.check_any), *( @@ -289,6 +434,7 @@ META_HOOK_DICT = cfgv.Map( item for item in MANIFEST_HOOK_DICT.items ), + *_COMMON_HOOK_WARNINGS, ) CONFIG_HOOK_DICT = cfgv.Map( 'Hook', 'id', @@ -304,18 +450,17 @@ CONFIG_HOOK_DICT = cfgv.Map( for item in MANIFEST_HOOK_DICT.items if item.key != 'id' if item.key != 'stages' + if item.key != 'language' # remove ), StagesMigrationNoDefault('stages', []), - OptionalSensibleRegexAtHook('files', cfgv.check_string), - OptionalSensibleRegexAtHook('exclude', cfgv.check_string), + LanguageMigration('language', cfgv.check_one_of(language_names)), # remove + *_COMMON_HOOK_WARNINGS, ) LOCAL_HOOK_DICT = cfgv.Map( 'Hook', 'id', *MANIFEST_HOOK_DICT.items, - - OptionalSensibleRegexAtHook('files', cfgv.check_string), - 
OptionalSensibleRegexAtHook('exclude', cfgv.check_string), + *_COMMON_HOOK_WARNINGS, ) CONFIG_REPO_DICT = cfgv.Map( 'Repository', 'repo', @@ -368,6 +513,7 @@ CONFIG_SCHEMA = cfgv.Map( 'default_language_version', DEFAULT_LANGUAGE_VERSION, {}, ), StagesMigration('default_stages', STAGES), + DeprecatedDefaultStagesWarning('default_stages'), cfgv.Optional('files', check_string_regex, ''), cfgv.Optional('exclude', check_string_regex, '^$'), cfgv.Optional('fail_fast', cfgv.check_bool, False), diff --git a/pre_commit/commands/gc.py b/pre_commit/commands/gc.py index 6892e097..975d5e4c 100644 --- a/pre_commit/commands/gc.py +++ b/pre_commit/commands/gc.py @@ -12,6 +12,7 @@ from pre_commit.clientlib import load_manifest from pre_commit.clientlib import LOCAL from pre_commit.clientlib import META from pre_commit.store import Store +from pre_commit.util import rmtree def _mark_used_repos( @@ -26,7 +27,8 @@ def _mark_used_repos( for hook in repo['hooks']: deps = hook.get('additional_dependencies') unused_repos.discard(( - store.db_repo_name(repo['repo'], deps), C.LOCAL_REPO_VERSION, + store.db_repo_name(repo['repo'], deps), + C.LOCAL_REPO_VERSION, )) else: key = (repo['repo'], repo['rev']) @@ -56,34 +58,41 @@ def _mark_used_repos( )) -def _gc_repos(store: Store) -> int: - configs = store.select_all_configs() - repos = store.select_all_repos() +def _gc(store: Store) -> int: + with store.exclusive_lock(), store.connect() as db: + store._create_configs_table(db) - # delete config paths which do not exist - dead_configs = [p for p in configs if not os.path.exists(p)] - live_configs = [p for p in configs if os.path.exists(p)] + repos = db.execute('SELECT repo, ref, path FROM repos').fetchall() + all_repos = {(repo, ref): path for repo, ref, path in repos} + unused_repos = set(all_repos) - all_repos = {(repo, ref): path for repo, ref, path in repos} - unused_repos = set(all_repos) - for config_path in live_configs: - try: - config = load_config(config_path) - except InvalidConfigError: - dead_configs.append(config_path) - continue - else: - for repo in config['repos']: - _mark_used_repos(store, all_repos, unused_repos, repo) + configs_rows = db.execute('SELECT path FROM configs').fetchall() + configs = [path for path, in configs_rows] - store.delete_configs(dead_configs) - for db_repo_name, ref in unused_repos: - store.delete_repo(db_repo_name, ref, all_repos[(db_repo_name, ref)]) - return len(unused_repos) + dead_configs = [] + for config_path in configs: + try: + config = load_config(config_path) + except InvalidConfigError: + dead_configs.append(config_path) + continue + else: + for repo in config['repos']: + _mark_used_repos(store, all_repos, unused_repos, repo) + + paths = [(path,) for path in dead_configs] + db.executemany('DELETE FROM configs WHERE path = ?', paths) + + db.executemany( + 'DELETE FROM repos WHERE repo = ? 
and ref = ?', + sorted(unused_repos), + ) + for k in unused_repos: + rmtree(all_repos[k]) + + return len(unused_repos) def gc(store: Store) -> int: - with store.exclusive_lock(): - repos_removed = _gc_repos(store) - output.write_line(f'{repos_removed} repo(s) removed.') + output.write_line(f'{_gc(store)} repo(s) removed.') return 0 diff --git a/pre_commit/commands/hazmat.py b/pre_commit/commands/hazmat.py new file mode 100644 index 00000000..01b27ce6 --- /dev/null +++ b/pre_commit/commands/hazmat.py @@ -0,0 +1,95 @@ +from __future__ import annotations + +import argparse +import subprocess +from collections.abc import Sequence + +from pre_commit.parse_shebang import normalize_cmd + + +def add_parsers(parser: argparse.ArgumentParser) -> None: + subparsers = parser.add_subparsers(dest='tool') + + cd_parser = subparsers.add_parser( + 'cd', help='cd to a subdir and run the command', + ) + cd_parser.add_argument('subdir') + cd_parser.add_argument('cmd', nargs=argparse.REMAINDER) + + ignore_exit_code_parser = subparsers.add_parser( + 'ignore-exit-code', help='run the command but ignore the exit code', + ) + ignore_exit_code_parser.add_argument('cmd', nargs=argparse.REMAINDER) + + n1_parser = subparsers.add_parser( + 'n1', help='run the command once per filename', + ) + n1_parser.add_argument('cmd', nargs=argparse.REMAINDER) + + +def _cmd_filenames(cmd: tuple[str, ...]) -> tuple[ + tuple[str, ...], + tuple[str, ...], +]: + for idx, val in enumerate(reversed(cmd)): + if val == '--': + split = len(cmd) - idx + break + else: + raise SystemExit('hazmat entry must end with `--`') + + return cmd[:split - 1], cmd[split:] + + +def cd(subdir: str, cmd: tuple[str, ...]) -> int: + cmd, filenames = _cmd_filenames(cmd) + + prefix = f'{subdir}/' + new_filenames = [] + for filename in filenames: + if not filename.startswith(prefix): + raise SystemExit(f'unexpected file without {prefix=}: {filename}') + else: + new_filenames.append(filename.removeprefix(prefix)) + + cmd = normalize_cmd(cmd) + return subprocess.call((*cmd, *new_filenames), cwd=subdir) + + +def ignore_exit_code(cmd: tuple[str, ...]) -> int: + cmd = normalize_cmd(cmd) + subprocess.call(cmd) + return 0 + + +def n1(cmd: tuple[str, ...]) -> int: + cmd, filenames = _cmd_filenames(cmd) + cmd = normalize_cmd(cmd) + ret = 0 + for filename in filenames: + ret |= subprocess.call((*cmd, filename)) + return ret + + +def impl(args: argparse.Namespace) -> int: + args.cmd = tuple(args.cmd) + if args.tool == 'cd': + return cd(args.subdir, args.cmd) + elif args.tool == 'ignore-exit-code': + return ignore_exit_code(args.cmd) + elif args.tool == 'n1': + return n1(args.cmd) + else: + raise NotImplementedError(f'unexpected tool: {args.tool}') + + +def main(argv: Sequence[str] | None = None) -> int: + parser = argparse.ArgumentParser() + add_parsers(parser) + args = parser.parse_args(argv) + + return impl(args) + + +if __name__ == '__main__': + raise SystemExit(main()) diff --git a/pre_commit/commands/hook_impl.py b/pre_commit/commands/hook_impl.py index 49a80b7b..de5c8f34 100644 --- a/pre_commit/commands/hook_impl.py +++ b/pre_commit/commands/hook_impl.py @@ -106,6 +106,7 @@ def _ns( hook=None, verbose=False, show_diff_on_failure=False, + fail_fast=False, ) diff --git a/pre_commit/commands/migrate_config.py b/pre_commit/commands/migrate_config.py index 842fb3a7..b04c53a5 100644 --- a/pre_commit/commands/migrate_config.py +++ b/pre_commit/commands/migrate_config.py @@ -1,13 +1,21 @@ from __future__ import annotations -import re +import functools +import itertools import 
textwrap +from collections.abc import Callable import cfgv import yaml +from yaml.nodes import ScalarNode from pre_commit.clientlib import InvalidConfigError +from pre_commit.yaml import yaml_compose from pre_commit.yaml import yaml_load +from pre_commit.yaml_rewrite import MappingKey +from pre_commit.yaml_rewrite import MappingValue +from pre_commit.yaml_rewrite import match +from pre_commit.yaml_rewrite import SequenceItem def _is_header_line(line: str) -> bool: @@ -38,16 +46,69 @@ def _migrate_map(contents: str) -> str: return contents -def _migrate_sha_to_rev(contents: str) -> str: - return re.sub(r'(\n\s+)sha:', r'\1rev:', contents) +def _preserve_style(n: ScalarNode, *, s: str) -> str: + style = n.style or '' + return f'{style}{s}{style}' -def _migrate_python_venv(contents: str) -> str: - return re.sub( - r'(\n\s+)language: python_venv\b', - r'\1language: python', - contents, +def _fix_stage(n: ScalarNode) -> str: + return _preserve_style(n, s=f'pre-{n.value}') + + +def _migrate_composed(contents: str) -> str: + tree = yaml_compose(contents) + rewrites: list[tuple[ScalarNode, Callable[[ScalarNode], str]]] = [] + + # sha -> rev + sha_to_rev_replace = functools.partial(_preserve_style, s='rev') + sha_to_rev_matcher = ( + MappingValue('repos'), + SequenceItem(), + MappingKey('sha'), ) + for node in match(tree, sha_to_rev_matcher): + rewrites.append((node, sha_to_rev_replace)) + + # python_venv -> python + language_matcher = ( + MappingValue('repos'), + SequenceItem(), + MappingValue('hooks'), + SequenceItem(), + MappingValue('language'), + ) + python_venv_replace = functools.partial(_preserve_style, s='python') + for node in match(tree, language_matcher): + if node.value == 'python_venv': + rewrites.append((node, python_venv_replace)) + + # stages rewrites + default_stages_matcher = (MappingValue('default_stages'), SequenceItem()) + default_stages_match = match(tree, default_stages_matcher) + hook_stages_matcher = ( + MappingValue('repos'), + SequenceItem(), + MappingValue('hooks'), + SequenceItem(), + MappingValue('stages'), + SequenceItem(), + ) + hook_stages_match = match(tree, hook_stages_matcher) + for node in itertools.chain(default_stages_match, hook_stages_match): + if node.value in {'commit', 'push', 'merge-commit'}: + rewrites.append((node, _fix_stage)) + + rewrites.sort(reverse=True, key=lambda nf: nf[0].start_mark.index) + + src_parts = [] + end: int | None = None + for node, func in rewrites: + src_parts.append(contents[node.end_mark.index:end]) + src_parts.append(func(node)) + end = node.start_mark.index + src_parts.append(contents[:end]) + src_parts.reverse() + return ''.join(src_parts) def migrate_config(config_file: str, quiet: bool = False) -> int: @@ -62,8 +123,7 @@ def migrate_config(config_file: str, quiet: bool = False) -> int: raise cfgv.ValidationError(str(e)) contents = _migrate_map(contents) - contents = _migrate_sha_to_rev(contents) - contents = _migrate_python_venv(contents) + contents = _migrate_composed(contents) if contents != orig_contents: with open(config_file, 'w') as f: diff --git a/pre_commit/commands/run.py b/pre_commit/commands/run.py index 2a08dff0..8ab505ff 100644 --- a/pre_commit/commands/run.py +++ b/pre_commit/commands/run.py @@ -61,7 +61,7 @@ def filter_by_include_exclude( names: Iterable[str], include: str, exclude: str, -) -> Generator[str, None, None]: +) -> Generator[str]: include_re, exclude_re = re.compile(include), re.compile(exclude) return ( filename for filename in names @@ -84,7 +84,7 @@ class Classifier: types: Iterable[str], 
types_or: Iterable[str], exclude_types: Iterable[str], - ) -> Generator[str, None, None]: + ) -> Generator[str]: types = frozenset(types) types_or = frozenset(types_or) exclude_types = frozenset(exclude_types) @@ -97,7 +97,7 @@ class Classifier: ): yield filename - def filenames_for_hook(self, hook: Hook) -> Generator[str, None, None]: + def filenames_for_hook(self, hook: Hook) -> Generator[str]: return self.by_types( filter_by_include_exclude( self.filenames, @@ -298,7 +298,8 @@ def _run_hooks( verbose=args.verbose, use_color=args.color, ) retval |= current_retval - if current_retval and (config['fail_fast'] or hook.fail_fast): + fail_fast = (config['fail_fast'] or hook.fail_fast or args.fail_fast) + if current_retval and fail_fast: break if retval and args.show_diff_on_failure and prior_diff: if args.all_files: diff --git a/pre_commit/envcontext.py b/pre_commit/envcontext.py index 1f816cea..d4d24118 100644 --- a/pre_commit/envcontext.py +++ b/pre_commit/envcontext.py @@ -33,7 +33,7 @@ def format_env(parts: SubstitutionT, env: MutableMapping[str, str]) -> str: def envcontext( patch: PatchesT, _env: MutableMapping[str, str] | None = None, -) -> Generator[None, None, None]: +) -> Generator[None]: """In this context, `os.environ` is modified according to `patch`. `patch` is an iterable of 2-tuples (key, value): diff --git a/pre_commit/error_handler.py b/pre_commit/error_handler.py index 73e608b7..4f0e0573 100644 --- a/pre_commit/error_handler.py +++ b/pre_commit/error_handler.py @@ -68,7 +68,7 @@ def _log_and_exit( @contextlib.contextmanager -def error_handler() -> Generator[None, None, None]: +def error_handler() -> Generator[None]: try: yield except (Exception, KeyboardInterrupt) as e: diff --git a/pre_commit/file_lock.py b/pre_commit/file_lock.py index d3dafb4d..6223f869 100644 --- a/pre_commit/file_lock.py +++ b/pre_commit/file_lock.py @@ -3,8 +3,8 @@ from __future__ import annotations import contextlib import errno import sys +from collections.abc import Callable from collections.abc import Generator -from typing import Callable if sys.platform == 'win32': # pragma: no cover (windows) @@ -20,7 +20,7 @@ if sys.platform == 'win32': # pragma: no cover (windows) def _locked( fileno: int, blocked_cb: Callable[[], None], - ) -> Generator[None, None, None]: + ) -> Generator[None]: try: msvcrt.locking(fileno, msvcrt.LK_NBLCK, _region) except OSError: @@ -53,7 +53,7 @@ else: # pragma: win32 no cover def _locked( fileno: int, blocked_cb: Callable[[], None], - ) -> Generator[None, None, None]: + ) -> Generator[None]: try: fcntl.flock(fileno, fcntl.LOCK_EX | fcntl.LOCK_NB) except OSError: # pragma: no cover (tests are single-threaded) @@ -69,7 +69,7 @@ else: # pragma: win32 no cover def lock( path: str, blocked_cb: Callable[[], None], -) -> Generator[None, None, None]: +) -> Generator[None]: with open(path, 'a+') as f: with _locked(f.fileno(), blocked_cb): yield diff --git a/pre_commit/git.py b/pre_commit/git.py index 19aac387..ec1928f3 100644 --- a/pre_commit/git.py +++ b/pre_commit/git.py @@ -126,7 +126,7 @@ def get_conflicted_files() -> set[str]: merge_diff_filenames = zsplit( cmd_output( 'git', 'diff', '--name-only', '--no-ext-diff', '-z', - '-m', tree_hash, 'HEAD', 'MERGE_HEAD', + '-m', tree_hash, 'HEAD', 'MERGE_HEAD', '--', )[1], ) return set(merge_conflict_filenames) | set(merge_diff_filenames) @@ -219,7 +219,7 @@ def check_for_cygwin_mismatch() -> None: if is_cygwin_python ^ is_cygwin_git: exe_type = {True: '(cygwin)', False: '(windows)'} - logger.warn( + logger.warning( f'pre-commit has 
detected a mix of cygwin python / git\n' f'This combination is not supported, it is likely you will ' f'receive an error later in the program.\n' diff --git a/pre_commit/lang_base.py b/pre_commit/lang_base.py index 5303948b..198e9365 100644 --- a/pre_commit/lang_base.py +++ b/pre_commit/lang_base.py @@ -5,6 +5,7 @@ import os import random import re import shlex +import sys from collections.abc import Generator from collections.abc import Sequence from typing import Any @@ -127,7 +128,7 @@ def no_install( @contextlib.contextmanager -def no_env(prefix: Prefix, version: str) -> Generator[None, None, None]: +def no_env(prefix: Prefix, version: str) -> Generator[None]: yield @@ -171,7 +172,10 @@ def run_xargs( def hook_cmd(entry: str, args: Sequence[str]) -> tuple[str, ...]: - return (*shlex.split(entry), *args) + cmd = shlex.split(entry) + if cmd[:2] == ['pre-commit', 'hazmat']: + cmd = [sys.executable, '-m', 'pre_commit.commands.hazmat', *cmd[2:]] + return (*cmd, *args) def basic_run_hook( diff --git a/pre_commit/languages/conda.py b/pre_commit/languages/conda.py index 80b3e150..d397ebeb 100644 --- a/pre_commit/languages/conda.py +++ b/pre_commit/languages/conda.py @@ -41,7 +41,7 @@ def get_env_patch(env: str) -> PatchesT: @contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: +def in_env(prefix: Prefix, version: str) -> Generator[None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir)): yield diff --git a/pre_commit/languages/coursier.py b/pre_commit/languages/coursier.py index 6558bf6b..08f9a958 100644 --- a/pre_commit/languages/coursier.py +++ b/pre_commit/languages/coursier.py @@ -70,7 +70,7 @@ def get_env_patch(target_dir: str) -> PatchesT: @contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: +def in_env(prefix: Prefix, version: str) -> Generator[None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir)): yield diff --git a/pre_commit/languages/dart.py b/pre_commit/languages/dart.py index 129ac591..52a229ee 100644 --- a/pre_commit/languages/dart.py +++ b/pre_commit/languages/dart.py @@ -29,7 +29,7 @@ def get_env_patch(venv: str) -> PatchesT: @contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: +def in_env(prefix: Prefix, version: str) -> Generator[None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir)): yield diff --git a/pre_commit/languages/docker.py b/pre_commit/languages/docker.py index 4de1d582..7f45ac86 100644 --- a/pre_commit/languages/docker.py +++ b/pre_commit/languages/docker.py @@ -1,8 +1,11 @@ from __future__ import annotations +import contextlib +import functools import hashlib import json import os +import re from collections.abc import Sequence from pre_commit import lang_base @@ -16,31 +19,33 @@ get_default_version = lang_base.basic_get_default_version health_check = lang_base.basic_health_check in_env = lang_base.no_env # no special environment for docker - -def _is_in_docker() -> bool: - try: - with open('/proc/1/cgroup', 'rb') as f: - return b'docker' in f.read() - except FileNotFoundError: - return False +_HOSTNAME_MOUNT_RE = re.compile( + rb""" + /containers + (?:/overlay-containers)? + /([a-z0-9]{64}) + (?:/userdata)? + /hostname + """, + re.VERBOSE, +) -def _get_container_id() -> str: - # It's assumed that we already check /proc/1/cgroup in _is_in_docker. 
The - # cpuset cgroup controller existed since cgroups were introduced so this - # way of getting the container ID is pretty reliable. - with open('/proc/1/cgroup', 'rb') as f: - for line in f.readlines(): - if line.split(b':')[1] == b'cpuset': - return os.path.basename(line.split(b':')[2]).strip().decode() - raise RuntimeError('Failed to find the container ID in /proc/1/cgroup.') +def _get_container_id() -> str | None: + with contextlib.suppress(FileNotFoundError): + with open('/proc/1/mountinfo', 'rb') as f: + for line in f: + m = _HOSTNAME_MOUNT_RE.search(line) + if m: + return m[1].decode() + + return None def _get_docker_path(path: str) -> str: - if not _is_in_docker(): - return path - container_id = _get_container_id() + if container_id is None: + return path try: _, out, _ = cmd_output_b('docker', 'inspect', container_id) @@ -101,7 +106,32 @@ def install_environment( os.mkdir(directory) +@functools.lru_cache(maxsize=1) +def _is_rootless() -> bool: # pragma: win32 no cover + retcode, out, _ = cmd_output_b( + 'docker', 'system', 'info', '--format', '{{ json . }}', + ) + if retcode != 0: + return False + + info = json.loads(out) + try: + return ( + # docker: + # https://docs.docker.com/reference/api/engine/version/v1.48/#tag/System/operation/SystemInfo + 'name=rootless' in (info.get('SecurityOptions') or ()) or + # podman: + # https://docs.podman.io/en/latest/_static/api.html?version=v5.4#tag/system/operation/SystemInfoLibpod + info['host']['security']['rootless'] + ) + except KeyError: + return False + + def get_docker_user() -> tuple[str, ...]: # pragma: win32 no cover + if _is_rootless(): + return () + try: return ('-u', f'{os.getuid()}:{os.getgid()}') except AttributeError: diff --git a/pre_commit/languages/dotnet.py b/pre_commit/languages/dotnet.py index e1202c4f..ffc65d1e 100644 --- a/pre_commit/languages/dotnet.py +++ b/pre_commit/languages/dotnet.py @@ -30,14 +30,14 @@ def get_env_patch(venv: str) -> PatchesT: @contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: +def in_env(prefix: Prefix, version: str) -> Generator[None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir)): yield @contextlib.contextmanager -def _nuget_config_no_sources() -> Generator[str, None, None]: +def _nuget_config_no_sources() -> Generator[str]: with tempfile.TemporaryDirectory() as tmpdir: nuget_config = os.path.join(tmpdir, 'nuget.config') with open(nuget_config, 'w') as f: diff --git a/pre_commit/languages/golang.py b/pre_commit/languages/golang.py index 66e07cf7..bedbd114 100644 --- a/pre_commit/languages/golang.py +++ b/pre_commit/languages/golang.py @@ -75,6 +75,7 @@ def get_env_patch(venv: str, version: str) -> PatchesT: return ( ('GOROOT', os.path.join(venv, '.go')), + ('GOTOOLCHAIN', 'local'), ( 'PATH', ( os.path.join(venv, 'bin'), os.pathsep, @@ -89,8 +90,7 @@ def _infer_go_version(version: str) -> str: if version != C.DEFAULT: return version resp = urllib.request.urlopen('https://go.dev/dl/?mode=json') - # TODO: 3.9+ .removeprefix('go') - return json.load(resp)[0]['version'][2:] + return json.load(resp)[0]['version'].removeprefix('go') def _get_url(version: str) -> str: @@ -121,7 +121,7 @@ def _install_go(version: str, dest: str) -> None: @contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: +def in_env(prefix: Prefix, version: str) -> Generator[None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with 
envcontext(get_env_patch(envdir, version)): yield @@ -145,6 +145,7 @@ def install_environment( env = no_git_env(dict(os.environ, GOPATH=gopath)) env.pop('GOBIN', None) if version != 'system': + env['GOTOOLCHAIN'] = 'local' env['GOROOT'] = os.path.join(env_dir, '.go') env['PATH'] = os.pathsep.join(( os.path.join(env_dir, '.go', 'bin'), os.environ['PATH'], diff --git a/pre_commit/languages/haskell.py b/pre_commit/languages/haskell.py index c6945c82..28bca08c 100644 --- a/pre_commit/languages/haskell.py +++ b/pre_commit/languages/haskell.py @@ -24,7 +24,7 @@ def get_env_patch(target_dir: str) -> PatchesT: @contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: +def in_env(prefix: Prefix, version: str) -> Generator[None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir)): yield diff --git a/pre_commit/languages/julia.py b/pre_commit/languages/julia.py new file mode 100644 index 00000000..7559b5ba --- /dev/null +++ b/pre_commit/languages/julia.py @@ -0,0 +1,133 @@ +from __future__ import annotations + +import contextlib +import os +import shutil +from collections.abc import Generator +from collections.abc import Sequence + +from pre_commit import lang_base +from pre_commit.envcontext import envcontext +from pre_commit.envcontext import PatchesT +from pre_commit.envcontext import UNSET +from pre_commit.prefix import Prefix +from pre_commit.util import cmd_output_b + +ENVIRONMENT_DIR = 'juliaenv' +health_check = lang_base.basic_health_check +get_default_version = lang_base.basic_get_default_version + + +def run_hook( + prefix: Prefix, + entry: str, + args: Sequence[str], + file_args: Sequence[str], + *, + is_local: bool, + require_serial: bool, + color: bool, +) -> tuple[int, bytes]: + # `entry` is a (hook-repo relative) file followed by (optional) args, e.g. + # `bin/id.jl` or `bin/hook.jl --arg1 --arg2` so we + # 1) shell parse it and join with args with hook_cmd + # 2) prepend the hooks prefix path to the first argument (the file), unless + # it is a local script + # 3) prepend `julia` as the interpreter + + cmd = lang_base.hook_cmd(entry, args) + script = cmd[0] if is_local else prefix.path(cmd[0]) + cmd = ('julia', '--startup-file=no', script, *cmd[1:]) + return lang_base.run_xargs( + cmd, + file_args, + require_serial=require_serial, + color=color, + ) + + +def get_env_patch(target_dir: str, version: str) -> PatchesT: + return ( + ('JULIA_LOAD_PATH', target_dir), + # May be set, remove it to not interfer with LOAD_PATH + ('JULIA_PROJECT', UNSET), + ) + + +@contextlib.contextmanager +def in_env(prefix: Prefix, version: str) -> Generator[None]: + envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) + with envcontext(get_env_patch(envdir, version)): + yield + + +def install_environment( + prefix: Prefix, + version: str, + additional_dependencies: Sequence[str], +) -> None: + envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) + with in_env(prefix, version): + # TODO: Support language_version with juliaup similar to rust via + # rustup + # if version != 'system': + # ... 
+ + # Copy Project.toml to hook env if it exist + os.makedirs(envdir, exist_ok=True) + project_names = ('JuliaProject.toml', 'Project.toml') + project_found = False + for project_name in project_names: + project_file = prefix.path(project_name) + if not os.path.isfile(project_file): + continue + shutil.copy(project_file, envdir) + project_found = True + break + + # If no project file was found we create an empty one so that the + # package manager doesn't error + if not project_found: + open(os.path.join(envdir, 'Project.toml'), 'a').close() + + # Copy Manifest.toml to hook env if it exists + manifest_names = ('JuliaManifest.toml', 'Manifest.toml') + for manifest_name in manifest_names: + manifest_file = prefix.path(manifest_name) + if not os.path.isfile(manifest_file): + continue + shutil.copy(manifest_file, envdir) + break + + # Julia code to instantiate the hook environment + julia_code = """ + @assert length(ARGS) > 0 + hook_env = ARGS[1] + deps = join(ARGS[2:end], " ") + + # We prepend @stdlib here so that we can load the package manager even + # though `get_env_patch` limits `JULIA_LOAD_PATH` to just the hook env. + pushfirst!(LOAD_PATH, "@stdlib") + using Pkg + popfirst!(LOAD_PATH) + + # Instantiate the environment shipped with the hook repo. If we have + # additional dependencies we disable precompilation in this step to + # avoid double work. + precompile = isempty(deps) ? "1" : "0" + withenv("JULIA_PKG_PRECOMPILE_AUTO" => precompile) do + Pkg.instantiate() + end + + # Add additional dependencies (with precompilation) + if !isempty(deps) + withenv("JULIA_PKG_PRECOMPILE_AUTO" => "1") do + Pkg.REPLMode.pkgstr("add " * deps) + end + end + """ + cmd_output_b( + 'julia', '--startup-file=no', '-e', julia_code, '--', envdir, + *additional_dependencies, + cwd=prefix.prefix_dir, + ) diff --git a/pre_commit/languages/lua.py b/pre_commit/languages/lua.py index a475ec99..15ac1a2e 100644 --- a/pre_commit/languages/lua.py +++ b/pre_commit/languages/lua.py @@ -44,7 +44,7 @@ def get_env_patch(d: str) -> PatchesT: # pragma: win32 no cover @contextlib.contextmanager # pragma: win32 no cover -def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: +def in_env(prefix: Prefix, version: str) -> Generator[None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir)): yield diff --git a/pre_commit/languages/node.py b/pre_commit/languages/node.py index d49c0e32..af7dc6f8 100644 --- a/pre_commit/languages/node.py +++ b/pre_commit/languages/node.py @@ -59,7 +59,7 @@ def get_env_patch(venv: str) -> PatchesT: @contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: +def in_env(prefix: Prefix, version: str) -> Generator[None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir)): yield diff --git a/pre_commit/languages/perl.py b/pre_commit/languages/perl.py index 61b1d114..a07d442a 100644 --- a/pre_commit/languages/perl.py +++ b/pre_commit/languages/perl.py @@ -33,7 +33,7 @@ def get_env_patch(venv: str) -> PatchesT: @contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: +def in_env(prefix: Prefix, version: str) -> Generator[None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir)): yield diff --git a/pre_commit/languages/python.py b/pre_commit/languages/python.py index 9f4bf69a..88ececce 100644 --- a/pre_commit/languages/python.py +++ 
b/pre_commit/languages/python.py @@ -75,6 +75,13 @@ def _find_by_py_launcher( return None +def _impl_exe_name() -> str: + if sys.implementation.name == 'cpython': # pragma: cpython cover + return 'python' + else: # pragma: cpython no cover + return sys.implementation.name # pypy mostly + + def _find_by_sys_executable() -> str | None: def _norm(path: str) -> str | None: _, exe = os.path.split(path.lower()) @@ -100,18 +107,25 @@ def _find_by_sys_executable() -> str | None: @functools.lru_cache(maxsize=1) def get_default_version() -> str: # pragma: no cover (platform dependent) - # First attempt from `sys.executable` (or the realpath) - exe = _find_by_sys_executable() - if exe: - return exe + v_major = f'{sys.version_info[0]}' + v_minor = f'{sys.version_info[0]}.{sys.version_info[1]}' - # Next try the `pythonX.X` executable - exe = f'python{sys.version_info[0]}.{sys.version_info[1]}' - if find_executable(exe): - return exe + # attempt the likely implementation exe + for potential in (v_minor, v_major): + exe = f'{_impl_exe_name()}{potential}' + if find_executable(exe): + return exe - if _find_by_py_launcher(exe): - return exe + # next try `sys.executable` (or the realpath) + maybe_exe = _find_by_sys_executable() + if maybe_exe: + return maybe_exe + + # maybe on windows we can find it via py launcher? + if sys.platform == 'win32': # pragma: win32 cover + exe = f'python{v_minor}' + if _find_by_py_launcher(exe): + return exe # We tried! return C.DEFAULT @@ -152,7 +166,7 @@ def norm_version(version: str) -> str | None: @contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: +def in_env(prefix: Prefix, version: str) -> Generator[None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir)): yield diff --git a/pre_commit/languages/r.py b/pre_commit/languages/r.py index 5d18bf1c..f70d2fdc 100644 --- a/pre_commit/languages/r.py +++ b/pre_commit/languages/r.py @@ -15,27 +15,50 @@ from pre_commit.envcontext import PatchesT from pre_commit.envcontext import UNSET from pre_commit.prefix import Prefix from pre_commit.util import cmd_output -from pre_commit.util import cmd_output_b from pre_commit.util import win_exe ENVIRONMENT_DIR = 'renv' -RSCRIPT_OPTS = ('--no-save', '--no-restore', '--no-site-file', '--no-environ') get_default_version = lang_base.basic_get_default_version +_RENV_ACTIVATED_OPTS = ( + '--no-save', '--no-restore', '--no-site-file', '--no-environ', +) -def _execute_vanilla_r_code_as_script( + +def _execute_r( code: str, *, prefix: Prefix, version: str, args: Sequence[str] = (), cwd: str, + cli_opts: Sequence[str], ) -> str: with in_env(prefix, version), _r_code_in_tempfile(code) as f: _, out, _ = cmd_output( - _rscript_exec(), *RSCRIPT_OPTS, f, *args, cwd=cwd, + _rscript_exec(), *cli_opts, f, *args, cwd=cwd, ) return out.rstrip('\n') +def _execute_r_in_renv( + code: str, *, + prefix: Prefix, version: str, args: Sequence[str] = (), cwd: str, +) -> str: + return _execute_r( + code=code, prefix=prefix, version=version, args=args, cwd=cwd, + cli_opts=_RENV_ACTIVATED_OPTS, + ) + + +def _execute_vanilla_r( + code: str, *, + prefix: Prefix, version: str, args: Sequence[str] = (), cwd: str, +) -> str: + return _execute_r( + code=code, prefix=prefix, version=version, args=args, cwd=cwd, + cli_opts=('--vanilla',), + ) + + def _read_installed_version(envdir: str, prefix: Prefix, version: str) -> str: - return _execute_vanilla_r_code_as_script( + return _execute_r_in_renv( 
'cat(renv::settings$r.version())', prefix=prefix, version=version, cwd=envdir, @@ -43,7 +66,7 @@ def _read_installed_version(envdir: str, prefix: Prefix, version: str) -> str: def _read_executable_version(envdir: str, prefix: Prefix, version: str) -> str: - return _execute_vanilla_r_code_as_script( + return _execute_r_in_renv( 'cat(as.character(getRversion()))', prefix=prefix, version=version, cwd=envdir, @@ -53,7 +76,7 @@ def _read_executable_version(envdir: str, prefix: Prefix, version: str) -> str: def _write_current_r_version( envdir: str, prefix: Prefix, version: str, ) -> None: - _execute_vanilla_r_code_as_script( + _execute_r_in_renv( 'renv::settings$r.version(as.character(getRversion()))', prefix=prefix, version=version, cwd=envdir, @@ -85,7 +108,7 @@ def health_check(prefix: Prefix, version: str) -> str | None: @contextlib.contextmanager -def _r_code_in_tempfile(code: str) -> Generator[str, None, None]: +def _r_code_in_tempfile(code: str) -> Generator[str]: """ To avoid quoting and escaping issues, avoid `Rscript [options] -e {expr}` but use `Rscript [options] path/to/file_with_expr.R` @@ -105,7 +128,7 @@ def get_env_patch(venv: str) -> PatchesT: @contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: +def in_env(prefix: Prefix, version: str) -> Generator[None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir)): yield @@ -161,7 +184,7 @@ def _cmd_from_hook( _entry_validate(cmd) cmd_part = _prefix_if_file_entry(cmd, prefix, is_local=is_local) - return (cmd[0], *RSCRIPT_OPTS, *cmd_part, *args) + return (cmd[0], *_RENV_ACTIVATED_OPTS, *cmd_part, *args) def install_environment( @@ -204,14 +227,15 @@ def install_environment( renv::install(prefix_dir) }} """ - - with _r_code_in_tempfile(r_code_inst_environment) as f: - cmd_output_b(_rscript_exec(), '--vanilla', f, cwd=env_dir) + _execute_vanilla_r( + r_code_inst_environment, + prefix=prefix, version=version, cwd=env_dir, + ) _write_current_r_version(envdir=env_dir, prefix=prefix, version=version) if additional_dependencies: r_code_inst_add = 'renv::install(commandArgs(trailingOnly = TRUE))' - _execute_vanilla_r_code_as_script( + _execute_r_in_renv( code=r_code_inst_add, prefix=prefix, version=version, args=additional_dependencies, cwd=env_dir, diff --git a/pre_commit/languages/ruby.py b/pre_commit/languages/ruby.py index 0438ae09..f32fea3f 100644 --- a/pre_commit/languages/ruby.py +++ b/pre_commit/languages/ruby.py @@ -73,7 +73,7 @@ def get_env_patch( @contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: +def in_env(prefix: Prefix, version: str) -> Generator[None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir, version)): yield diff --git a/pre_commit/languages/rust.py b/pre_commit/languages/rust.py index 5f9db8fb..fd77a9d2 100644 --- a/pre_commit/languages/rust.py +++ b/pre_commit/languages/rust.py @@ -61,7 +61,7 @@ def get_env_patch(target_dir: str, version: str) -> PatchesT: @contextlib.contextmanager -def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: +def in_env(prefix: Prefix, version: str) -> Generator[None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir, version)): yield diff --git a/pre_commit/languages/swift.py b/pre_commit/languages/swift.py index f7bfe84c..08a9c39a 100644 --- a/pre_commit/languages/swift.py +++ 
b/pre_commit/languages/swift.py @@ -27,7 +27,7 @@ def get_env_patch(venv: str) -> PatchesT: # pragma: win32 no cover @contextlib.contextmanager # pragma: win32 no cover -def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: +def in_env(prefix: Prefix, version: str) -> Generator[None]: envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) with envcontext(get_env_patch(envdir)): yield diff --git a/pre_commit/languages/system.py b/pre_commit/languages/unsupported.py similarity index 100% rename from pre_commit/languages/system.py rename to pre_commit/languages/unsupported.py diff --git a/pre_commit/languages/script.py b/pre_commit/languages/unsupported_script.py similarity index 100% rename from pre_commit/languages/script.py rename to pre_commit/languages/unsupported_script.py diff --git a/pre_commit/logging_handler.py b/pre_commit/logging_handler.py index cd33953d..74772bee 100644 --- a/pre_commit/logging_handler.py +++ b/pre_commit/logging_handler.py @@ -32,7 +32,7 @@ class LoggingHandler(logging.Handler): @contextlib.contextmanager -def logging_handler(use_color: bool) -> Generator[None, None, None]: +def logging_handler(use_color: bool) -> Generator[None]: handler = LoggingHandler(use_color) logger.addHandler(handler) logger.setLevel(logging.INFO) diff --git a/pre_commit/main.py b/pre_commit/main.py index 559c927c..0c3eefda 100644 --- a/pre_commit/main.py +++ b/pre_commit/main.py @@ -10,6 +10,7 @@ import pre_commit.constants as C from pre_commit import clientlib from pre_commit import git from pre_commit.color import add_color_option +from pre_commit.commands import hazmat from pre_commit.commands.autoupdate import autoupdate from pre_commit.commands.clean import clean from pre_commit.commands.gc import gc @@ -41,7 +42,7 @@ os.environ.pop('__PYVENV_LAUNCHER__', None) os.environ.pop('PYTHONEXECUTABLE', None) COMMANDS_NO_GIT = { - 'clean', 'gc', 'init-templatedir', 'sample-config', + 'clean', 'gc', 'hazmat', 'init-templatedir', 'sample-config', 'validate-config', 'validate-manifest', } @@ -62,10 +63,10 @@ def _add_hook_type_option(parser: argparse.ArgumentParser) -> None: def _add_run_options(parser: argparse.ArgumentParser) -> None: parser.add_argument('hook', nargs='?', help='A single hook-id to run') - parser.add_argument('--verbose', '-v', action='store_true', default=False) + parser.add_argument('--verbose', '-v', action='store_true') mutex_group = parser.add_mutually_exclusive_group(required=False) mutex_group.add_argument( - '--all-files', '-a', action='store_true', default=False, + '--all-files', '-a', action='store_true', help='Run on all the files in the repo.', ) mutex_group.add_argument( @@ -76,6 +77,10 @@ def _add_run_options(parser: argparse.ArgumentParser) -> None: '--show-diff-on-failure', action='store_true', help='When hooks fail, run `git diff` directly afterward.', ) + parser.add_argument( + '--fail-fast', action='store_true', + help='Stop after the first failing hook.', + ) parser.add_argument( '--hook-stage', choices=clientlib.STAGES, @@ -241,6 +246,11 @@ def main(argv: Sequence[str] | None = None) -> int: _add_cmd('gc', help='Clean unused cached repos.') + hazmat_parser = _add_cmd( + 'hazmat', help='Composable tools for rare use in hook `entry`.', + ) + hazmat.add_parsers(hazmat_parser) + init_templatedir_parser = _add_cmd( 'init-templatedir', help=( @@ -275,7 +285,7 @@ def main(argv: Sequence[str] | None = None) -> int: ) _add_hook_type_option(install_parser) install_parser.add_argument( - '--allow-missing-config', 
action='store_true', default=False, + '--allow-missing-config', action='store_true', help=( 'Whether to allow a missing `pre-commit` configuration file ' 'or exit with a failure code.' @@ -385,6 +395,8 @@ def main(argv: Sequence[str] | None = None) -> int: return clean(store) elif args.command == 'gc': return gc(store) + elif args.command == 'hazmat': + return hazmat.impl(args) elif args.command == 'hook-impl': return hook_impl( store, diff --git a/pre_commit/repository.py b/pre_commit/repository.py index aa841856..a9461ab6 100644 --- a/pre_commit/repository.py +++ b/pre_commit/repository.py @@ -3,7 +3,6 @@ from __future__ import annotations import json import logging import os -import shlex from collections.abc import Sequence from typing import Any @@ -68,14 +67,6 @@ def _hook_install(hook: Hook) -> None: logger.info('Once installed this environment will be reused.') logger.info('This may take a few minutes...') - if hook.language == 'python_venv': - logger.warning( - f'`repo: {hook.src}` uses deprecated `language: python_venv`. ' - f'This is an alias for `language: python`. ' - f'Often `pre-commit autoupdate --repo {shlex.quote(hook.src)}` ' - f'will fix this.', - ) - lang = languages[hook.language] assert lang.ENVIRONMENT_DIR is not None diff --git a/pre_commit/resources/empty_template_pubspec.yaml b/pre_commit/resources/empty_template_pubspec.yaml index 3be6ffe3..8306aeb6 100644 --- a/pre_commit/resources/empty_template_pubspec.yaml +++ b/pre_commit/resources/empty_template_pubspec.yaml @@ -1,4 +1,4 @@ name: pre_commit_empty_pubspec environment: - sdk: '>=2.10.0' + sdk: '>=2.12.0' executables: {} diff --git a/pre_commit/resources/empty_template_setup.py b/pre_commit/resources/empty_template_setup.py index ef05eef8..e8b1ff02 100644 --- a/pre_commit/resources/empty_template_setup.py +++ b/pre_commit/resources/empty_template_setup.py @@ -1,4 +1,4 @@ from setuptools import setup -setup(name='pre-commit-placeholder-package', version='0.0.0') +setup(name='pre-commit-placeholder-package', version='0.0.0', py_modules=[]) diff --git a/pre_commit/resources/rbenv.tar.gz b/pre_commit/resources/rbenv.tar.gz index da2514e7..b5df0874 100644 Binary files a/pre_commit/resources/rbenv.tar.gz and b/pre_commit/resources/rbenv.tar.gz differ diff --git a/pre_commit/resources/ruby-build.tar.gz b/pre_commit/resources/ruby-build.tar.gz index 19d467fd..5c82c906 100644 Binary files a/pre_commit/resources/ruby-build.tar.gz and b/pre_commit/resources/ruby-build.tar.gz differ diff --git a/pre_commit/resources/ruby-download.tar.gz b/pre_commit/resources/ruby-download.tar.gz index 92502a77..f7cb0b42 100644 Binary files a/pre_commit/resources/ruby-download.tar.gz and b/pre_commit/resources/ruby-download.tar.gz differ diff --git a/pre_commit/staged_files_only.py b/pre_commit/staged_files_only.py index e1f81ba9..99ea0979 100644 --- a/pre_commit/staged_files_only.py +++ b/pre_commit/staged_files_only.py @@ -33,7 +33,7 @@ def _git_apply(patch: str) -> None: @contextlib.contextmanager -def _intent_to_add_cleared() -> Generator[None, None, None]: +def _intent_to_add_cleared() -> Generator[None]: intent_to_add = git.intent_to_add_files() if intent_to_add: logger.warning('Unstaged intent-to-add files detected.') @@ -48,7 +48,7 @@ def _intent_to_add_cleared() -> Generator[None, None, None]: @contextlib.contextmanager -def _unstaged_changes_cleared(patch_dir: str) -> Generator[None, None, None]: +def _unstaged_changes_cleared(patch_dir: str) -> Generator[None]: tree = cmd_output('git', 'write-tree')[1].strip() diff_cmd = ( 
'git', 'diff-index', '--ignore-submodules', '--binary', @@ -105,7 +105,7 @@ def _unstaged_changes_cleared(patch_dir: str) -> Generator[None, None, None]: @contextlib.contextmanager -def staged_files_only(patch_dir: str) -> Generator[None, None, None]: +def staged_files_only(patch_dir: str) -> Generator[None]: """Clear any unstaged changes from the git working directory inside this context. """ diff --git a/pre_commit/store.py b/pre_commit/store.py index 84bc09a4..dc90c051 100644 --- a/pre_commit/store.py +++ b/pre_commit/store.py @@ -5,18 +5,18 @@ import logging import os.path import sqlite3 import tempfile +from collections.abc import Callable from collections.abc import Generator from collections.abc import Sequence -from typing import Callable import pre_commit.constants as C +from pre_commit import clientlib from pre_commit import file_lock from pre_commit import git from pre_commit.util import CalledProcessError from pre_commit.util import clean_path_on_failure from pre_commit.util import cmd_output_b from pre_commit.util import resource_text -from pre_commit.util import rmtree logger = logging.getLogger('pre_commit') @@ -95,13 +95,13 @@ class Store: ' PRIMARY KEY (repo, ref)' ');', ) - self._create_config_table(db) + self._create_configs_table(db) # Atomic file move os.replace(tmpfile, self.db_path) @contextlib.contextmanager - def exclusive_lock(self) -> Generator[None, None, None]: + def exclusive_lock(self) -> Generator[None]: def blocked_cb() -> None: # pragma: no cover (tests are in-process) logger.info('Locking pre-commit directory') @@ -112,7 +112,7 @@ class Store: def connect( self, db_path: str | None = None, - ) -> Generator[sqlite3.Connection, None, None]: + ) -> Generator[sqlite3.Connection]: db_path = db_path or self.db_path # sqlite doesn't close its fd with its contextmanager >.< # contextlib.closing fixes this. 
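One change that recurs throughout this diff is shortening `Generator[None, None, None]` to `Generator[None]`. With current typeshed stubs the send and return type parameters of `collections.abc.Generator` default to `None`, so the two spellings are equivalent for a type checker such as the mypy release pinned in `.pre-commit-config.yaml` above. A minimal sketch of the pattern (the function name is illustrative, assuming stubs/mypy new enough to know the defaults):

```python
import contextlib
from collections.abc import Generator


@contextlib.contextmanager
def _example_cm() -> Generator[None]:  # same as Generator[None, None, None]
    # set-up would go here
    yield
    # tear-down would go here
```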
@@ -136,6 +136,7 @@ class Store: deps: Sequence[str], make_strategy: Callable[[str], None], ) -> str: + original_repo = repo repo = self.db_repo_name(repo, deps) def _get_result() -> str | None: @@ -168,6 +169,9 @@ class Store: 'INSERT INTO repos (repo, ref, path) VALUES (?, ?, ?)', [repo, ref, directory], ) + + clientlib.warn_for_stages_on_repo_init(original_repo, directory) + return directory def _complete_clone(self, ref: str, git_cmd: Callable[..., None]) -> None: @@ -210,7 +214,7 @@ class Store: 'local', C.LOCAL_REPO_VERSION, deps, _make_local_repo, ) - def _create_config_table(self, db: sqlite3.Connection) -> None: + def _create_configs_table(self, db: sqlite3.Connection) -> None: db.executescript( 'CREATE TABLE IF NOT EXISTS configs (' ' path TEXT NOT NULL,' @@ -227,28 +231,5 @@ class Store: return with self.connect() as db: # TODO: eventually remove this and only create in _create - self._create_config_table(db) + self._create_configs_table(db) db.execute('INSERT OR IGNORE INTO configs VALUES (?)', (path,)) - - def select_all_configs(self) -> list[str]: - with self.connect() as db: - self._create_config_table(db) - rows = db.execute('SELECT path FROM configs').fetchall() - return [path for path, in rows] - - def delete_configs(self, configs: list[str]) -> None: - with self.connect() as db: - rows = [(path,) for path in configs] - db.executemany('DELETE FROM configs WHERE path = ?', rows) - - def select_all_repos(self) -> list[tuple[str, str, str]]: - with self.connect() as db: - return db.execute('SELECT repo, ref, path from repos').fetchall() - - def delete_repo(self, db_repo_name: str, ref: str, path: str) -> None: - with self.connect() as db: - db.execute( - 'DELETE FROM repos WHERE repo = ? and ref = ?', - (db_repo_name, ref), - ) - rmtree(path) diff --git a/pre_commit/util.py b/pre_commit/util.py index 12aa3c0e..19b1880b 100644 --- a/pre_commit/util.py +++ b/pre_commit/util.py @@ -8,10 +8,10 @@ import shutil import stat import subprocess import sys +from collections.abc import Callable from collections.abc import Generator from types import TracebackType from typing import Any -from typing import Callable from pre_commit import parse_shebang @@ -25,7 +25,7 @@ def force_bytes(exc: Any) -> bytes: @contextlib.contextmanager -def clean_path_on_failure(path: str) -> Generator[None, None, None]: +def clean_path_on_failure(path: str) -> Generator[None]: """Cleans up the directory on an exceptional failure.""" try: yield diff --git a/pre_commit/xargs.py b/pre_commit/xargs.py index 22580f59..7c98d167 100644 --- a/pre_commit/xargs.py +++ b/pre_commit/xargs.py @@ -7,12 +7,12 @@ import multiprocessing import os import subprocess import sys +from collections.abc import Callable from collections.abc import Generator from collections.abc import Iterable from collections.abc import MutableMapping from collections.abc import Sequence from typing import Any -from typing import Callable from typing import TypeVar from pre_commit import parse_shebang @@ -120,7 +120,6 @@ def partition( @contextlib.contextmanager def _thread_mapper(maxsize: int) -> Generator[ Callable[[Callable[[TArg], TRet], Iterable[TArg]], Iterable[TRet]], - None, None, ]: if maxsize == 1: yield map diff --git a/pre_commit/yaml.py b/pre_commit/yaml.py index bdf4ec47..a5bbbc99 100644 --- a/pre_commit/yaml.py +++ b/pre_commit/yaml.py @@ -6,6 +6,7 @@ from typing import Any import yaml Loader = getattr(yaml, 'CSafeLoader', yaml.SafeLoader) +yaml_compose = functools.partial(yaml.compose, Loader=Loader) yaml_load = 
functools.partial(yaml.load, Loader=Loader) Dumper = getattr(yaml, 'CSafeDumper', yaml.SafeDumper) diff --git a/pre_commit/yaml_rewrite.py b/pre_commit/yaml_rewrite.py new file mode 100644 index 00000000..8d0e8fdb --- /dev/null +++ b/pre_commit/yaml_rewrite.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +from collections.abc import Generator +from collections.abc import Iterable +from typing import NamedTuple +from typing import Protocol + +from yaml.nodes import MappingNode +from yaml.nodes import Node +from yaml.nodes import ScalarNode +from yaml.nodes import SequenceNode + + +class _Matcher(Protocol): + def match(self, n: Node) -> Generator[Node]: ... + + +class MappingKey(NamedTuple): + k: str + + def match(self, n: Node) -> Generator[Node]: + if isinstance(n, MappingNode): + for k, _ in n.value: + if k.value == self.k: + yield k + + +class MappingValue(NamedTuple): + k: str + + def match(self, n: Node) -> Generator[Node]: + if isinstance(n, MappingNode): + for k, v in n.value: + if k.value == self.k: + yield v + + +class SequenceItem(NamedTuple): + def match(self, n: Node) -> Generator[Node]: + if isinstance(n, SequenceNode): + yield from n.value + + +def _match(gen: Iterable[Node], m: _Matcher) -> Iterable[Node]: + return (n for src in gen for n in m.match(src)) + + +def match(n: Node, matcher: tuple[_Matcher, ...]) -> Generator[ScalarNode]: + gen: Iterable[Node] = (n,) + for m in matcher: + gen = _match(gen, m) + return (n for n in gen if isinstance(n, ScalarNode)) diff --git a/setup.cfg b/setup.cfg index 52b7681e..a95ee447 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = pre_commit -version = 3.8.0 +version = 4.5.1 description = A framework for managing and maintaining multi-language pre-commit hooks. long_description = file: README.md long_description_content_type = text/markdown @@ -10,7 +10,6 @@ author_email = asottile@umich.edu license = MIT license_files = LICENSE classifiers = - License :: OSI Approved :: MIT License Programming Language :: Python :: 3 Programming Language :: Python :: 3 :: Only Programming Language :: Python :: Implementation :: CPython @@ -24,7 +23,7 @@ install_requires = nodeenv>=0.11.1 pyyaml>=5.1 virtualenv>=20.10.0 -python_requires = >=3.9 +python_requires = >=3.10 [options.packages.find] exclude = @@ -53,6 +52,7 @@ check_untyped_defs = true disallow_any_generics = true disallow_incomplete_defs = true disallow_untyped_defs = true +enable_error_code = deprecated warn_redundant_casts = true warn_unused_ignores = true diff --git a/testing/get-dart.sh b/testing/get-dart.sh index 998b9d98..b4545e71 100755 --- a/testing/get-dart.sh +++ b/testing/get-dart.sh @@ -1,7 +1,7 @@ #!/usr/bin/env bash set -euo pipefail -VERSION=2.13.4 +VERSION=2.19.6 if [ "$OSTYPE" = msys ]; then URL="https://storage.googleapis.com/dart-archive/channels/stable/release/${VERSION}/sdk/dartsdk-windows-x64-release.zip" diff --git a/testing/make-archives b/testing/make-archives index 3c7ab9dd..10f40a3a 100755 --- a/testing/make-archives +++ b/testing/make-archives @@ -16,8 +16,8 @@ from collections.abc import Sequence REPOS = ( - ('rbenv', 'https://github.com/rbenv/rbenv', '38e1fbb'), - ('ruby-build', 'https://github.com/rbenv/ruby-build', '855b963'), + ('rbenv', 'https://github.com/rbenv/rbenv', '10e96bfc'), + ('ruby-build', 'https://github.com/rbenv/ruby-build', '447468b1'), ( 'ruby-download', 'https://github.com/garnieretienne/rvm-download', @@ -57,8 +57,7 @@ def make_archive(name: str, repo: str, ref: str, destdir: str) -> str: arcs.sort() with 
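
The new `yaml_rewrite` helpers above walk a composed YAML node tree and yield the scalar nodes addressed by a matcher path, so a rewrite can touch a single token while preserving the rest of the file byte-for-byte. A small usage sketch; the specific matcher tuple and the `sha` example are illustrative assumptions about how `migrate-config` might use these helpers, not a quote of its implementation:

```python
from __future__ import annotations

from pre_commit.yaml import yaml_compose
from pre_commit.yaml_rewrite import MappingKey
from pre_commit.yaml_rewrite import MappingValue
from pre_commit.yaml_rewrite import SequenceItem
from pre_commit.yaml_rewrite import match

CONTENTS = '''\
repos:
-   repo: https://github.com/pre-commit/pre-commit-hooks
    sha: v1.2.0
    hooks: []
'''

# address every `sha` key under `repos[*]`
matcher = (MappingValue('repos'), SequenceItem(), MappingKey('sha'))
for node in match(yaml_compose(CONTENTS), matcher):
    # each result is a ScalarNode whose marks give exact source offsets,
    # so only this token needs to be replaced in the original text
    print(node.value, node.start_mark.index, node.end_mark.index)
```
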
gzip.GzipFile(output_path, 'wb', mtime=0) as gzipf: - # https://github.com/python/typeshed/issues/5491 - with tarfile.open(fileobj=gzipf, mode='w') as tf: # type: ignore + with tarfile.open(fileobj=gzipf, mode='w') as tf: for arcname, abspath in arcs: tf.add( abspath, diff --git a/testing/resources/python3_hooks_repo/.pre-commit-hooks.yaml b/testing/resources/python3_hooks_repo/.pre-commit-hooks.yaml deleted file mode 100644 index 2c237009..00000000 --- a/testing/resources/python3_hooks_repo/.pre-commit-hooks.yaml +++ /dev/null @@ -1,6 +0,0 @@ -- id: python3-hook - name: Python 3 Hook - entry: python3-hook - language: python - language_version: python3 - files: \.py$ diff --git a/testing/resources/python3_hooks_repo/py3_hook.py b/testing/resources/python3_hooks_repo/py3_hook.py deleted file mode 100644 index 8c9cda4c..00000000 --- a/testing/resources/python3_hooks_repo/py3_hook.py +++ /dev/null @@ -1,8 +0,0 @@ -import sys - - -def main(): - print(sys.version_info[0]) - print(repr(sys.argv[1:])) - print('Hello World') - return 0 diff --git a/testing/resources/python3_hooks_repo/setup.py b/testing/resources/python3_hooks_repo/setup.py deleted file mode 100644 index 9125dc1d..00000000 --- a/testing/resources/python3_hooks_repo/setup.py +++ /dev/null @@ -1,8 +0,0 @@ -from setuptools import setup - -setup( - name='python3_hook', - version='0.0.0', - py_modules=['py3_hook'], - entry_points={'console_scripts': ['python3-hook = py3_hook:main']}, -) diff --git a/testing/resources/system_hook_with_spaces_repo/.pre-commit-hooks.yaml b/testing/resources/system_hook_with_spaces_repo/.pre-commit-hooks.yaml deleted file mode 100644 index b2c347c1..00000000 --- a/testing/resources/system_hook_with_spaces_repo/.pre-commit-hooks.yaml +++ /dev/null @@ -1,5 +0,0 @@ -- id: system-hook-with-spaces - name: System hook with spaces - entry: bash -c 'echo "Hello World"' - language: system - files: \.sh$ diff --git a/testing/util.py b/testing/util.py index 08d52cbc..1646ccd2 100644 --- a/testing/util.py +++ b/testing/util.py @@ -40,6 +40,7 @@ def run_opts( color=False, verbose=False, hook=None, + fail_fast=False, remote_branch='', local_branch='', from_ref='', @@ -65,6 +66,7 @@ def run_opts( color=color, verbose=verbose, hook=hook, + fail_fast=fail_fast, remote_branch=remote_branch, local_branch=local_branch, from_ref=from_ref, diff --git a/testing/zipapp/make b/testing/zipapp/make index 165046f6..43bb4373 100755 --- a/testing/zipapp/make +++ b/testing/zipapp/make @@ -107,9 +107,6 @@ def main() -> int: shebang = '/usr/bin/env python3' zipapp.create_archive(tmpdir, filename, interpreter=shebang) - with open(f'{filename}.sha256sum', 'w') as f: - subprocess.check_call(('sha256sum', filename), stdout=f) - return 0 diff --git a/tests/all_languages_test.py b/tests/all_languages_test.py deleted file mode 100644 index 98c91215..00000000 --- a/tests/all_languages_test.py +++ /dev/null @@ -1,7 +0,0 @@ -from __future__ import annotations - -from pre_commit.all_languages import languages - - -def test_python_venv_is_an_alias_to_python(): - assert languages['python_venv'] is languages['python'] diff --git a/tests/clientlib_test.py b/tests/clientlib_test.py index eaa8a044..2c42b80c 100644 --- a/tests/clientlib_test.py +++ b/tests/clientlib_test.py @@ -12,6 +12,8 @@ from pre_commit.clientlib import CONFIG_HOOK_DICT from pre_commit.clientlib import CONFIG_REPO_DICT from pre_commit.clientlib import CONFIG_SCHEMA from pre_commit.clientlib import DEFAULT_LANGUAGE_VERSION +from pre_commit.clientlib import InvalidManifestError +from 
pre_commit.clientlib import load_manifest from pre_commit.clientlib import MANIFEST_HOOK_DICT from pre_commit.clientlib import MANIFEST_SCHEMA from pre_commit.clientlib import META_HOOK_DICT @@ -256,6 +258,24 @@ def test_validate_optional_sensible_regex_at_local_hook(caplog): ] +def test_validate_optional_sensible_regex_at_meta_hook(caplog): + config_obj = { + 'repo': 'meta', + 'hooks': [{'id': 'identity', 'files': 'dir/*.py'}], + } + + cfgv.validate(config_obj, CONFIG_REPO_DICT) + + assert caplog.record_tuples == [ + ( + 'pre_commit', + logging.WARNING, + "The 'files' field in hook 'identity' is a regex, not a glob " + "-- matching '/*' probably isn't what you want here", + ), + ] + + @pytest.mark.parametrize( ('regex', 'warning'), ( @@ -291,6 +311,97 @@ def test_validate_optional_sensible_regex_at_top_level(caplog, regex, warning): assert caplog.record_tuples == [('pre_commit', logging.WARNING, warning)] +def test_invalid_stages_error(): + cfg = {'repos': [sample_local_config()]} + cfg['repos'][0]['hooks'][0]['stages'] = ['invalid'] + + with pytest.raises(cfgv.ValidationError) as excinfo: + cfgv.validate(cfg, CONFIG_SCHEMA) + + assert str(excinfo.value) == ( + '\n' + '==> At Config()\n' + '==> At key: repos\n' + "==> At Repository(repo='local')\n" + '==> At key: hooks\n' + "==> At Hook(id='do_not_commit')\n" + # this line was missing due to the custom validator + '==> At key: stages\n' + '==> At index 0\n' + "=====> Expected one of commit-msg, manual, post-checkout, post-commit, post-merge, post-rewrite, pre-commit, pre-merge-commit, pre-push, pre-rebase, prepare-commit-msg but got: 'invalid'" # noqa: E501 + ) + + +def test_warning_for_deprecated_stages(caplog): + config_obj = sample_local_config() + config_obj['hooks'][0]['stages'] = ['commit', 'push'] + + cfgv.validate(config_obj, CONFIG_REPO_DICT) + + assert caplog.record_tuples == [ + ( + 'pre_commit', + logging.WARNING, + 'hook id `do_not_commit` uses deprecated stage names ' + '(commit, push) which will be removed in a future version. ' + 'run: `pre-commit migrate-config` to automatically fix this.', + ), + ] + + +def test_no_warning_for_non_deprecated_stages(caplog): + config_obj = sample_local_config() + config_obj['hooks'][0]['stages'] = ['pre-commit', 'pre-push'] + + cfgv.validate(config_obj, CONFIG_REPO_DICT) + + assert caplog.record_tuples == [] + + +def test_warning_for_deprecated_default_stages(caplog): + cfg = {'default_stages': ['commit', 'push'], 'repos': []} + + cfgv.validate(cfg, CONFIG_SCHEMA) + + assert caplog.record_tuples == [ + ( + 'pre_commit', + logging.WARNING, + 'top-level `default_stages` uses deprecated stage names ' + '(commit, push) which will be removed in a future version. 
' + 'run: `pre-commit migrate-config` to automatically fix this.', + ), + ] + + +def test_no_warning_for_non_deprecated_default_stages(caplog): + cfg = {'default_stages': ['pre-commit', 'pre-push'], 'repos': []} + + cfgv.validate(cfg, CONFIG_SCHEMA) + + assert caplog.record_tuples == [] + + +def test_unsupported_language_migration(): + cfg = {'repos': [sample_local_config(), sample_local_config()]} + cfg['repos'][0]['hooks'][0]['language'] = 'system' + cfg['repos'][1]['hooks'][0]['language'] = 'script' + + cfgv.validate(cfg, CONFIG_SCHEMA) + ret = cfgv.apply_defaults(cfg, CONFIG_SCHEMA) + + assert ret['repos'][0]['hooks'][0]['language'] == 'unsupported' + assert ret['repos'][1]['hooks'][0]['language'] == 'unsupported_script' + + +def test_unsupported_language_migration_language_required(): + cfg = {'repos': [sample_local_config()]} + del cfg['repos'][0]['hooks'][0]['language'] + + with pytest.raises(cfgv.ValidationError): + cfgv.validate(cfg, CONFIG_SCHEMA) + + @pytest.mark.parametrize( 'manifest_obj', ( @@ -479,3 +590,18 @@ def test_config_hook_stages_defaulting(): 'id': 'fake-hook', 'stages': ['commit-msg', 'pre-push', 'pre-commit', 'pre-merge-commit'], } + + +def test_manifest_v5_forward_compat(tmp_path): + manifest = tmp_path.joinpath('.pre-commit-hooks.yaml') + manifest.write_text('hooks: {}') + + with pytest.raises(InvalidManifestError) as excinfo: + load_manifest(manifest) + assert str(excinfo.value) == ( + f'\n' + f'==> File {manifest}\n' + f'=====> \n' + f'=====> pre-commit version 5 is required but version {C.VERSION} ' + f'is installed. Perhaps run `pip install --upgrade pre-commit`.' + ) diff --git a/tests/commands/gc_test.py b/tests/commands/gc_test.py index 95113ed5..992b02f3 100644 --- a/tests/commands/gc_test.py +++ b/tests/commands/gc_test.py @@ -19,11 +19,13 @@ from testing.util import git_commit def _repo_count(store): - return len(store.select_all_repos()) + with store.connect() as db: + return db.execute('SELECT COUNT(1) FROM repos').fetchone()[0] def _config_count(store): - return len(store.select_all_configs()) + with store.connect() as db: + return db.execute('SELECT COUNT(1) FROM configs').fetchone()[0] def _remove_config_assert_cleared(store, cap_out): @@ -153,7 +155,8 @@ def test_invalid_manifest_gcd(tempdir_factory, store, in_git_dir, cap_out): install_hooks(C.CONFIG_FILE, store) # we'll "break" the manifest to simulate an old version clone - (_, _, path), = store.select_all_repos() + with store.connect() as db: + path, = db.execute('SELECT path FROM repos').fetchone() os.remove(os.path.join(path, C.MANIFEST_FILE)) assert _config_count(store) == 1 @@ -162,3 +165,11 @@ def test_invalid_manifest_gcd(tempdir_factory, store, in_git_dir, cap_out): assert _config_count(store) == 1 assert _repo_count(store) == 0 assert cap_out.get().splitlines()[-1] == '1 repo(s) removed.' 
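
The `test_unsupported_language_migration` tests above pin down the new aliases: a config may still say `language: system` or `language: script`, but after validation and `apply_defaults` those resolve to `unsupported` and `unsupported_script`. A reference sketch of that mapping, derived from the expected values in the tests (the actual rewrite happens inside the clientlib schema; this helper is illustrative only):

```python
from __future__ import annotations

# legacy spelling -> current spelling (both remain accepted in configs for now)
_LANGUAGE_ALIASES = {
    'system': 'unsupported',
    'script': 'unsupported_script',
}


def resolve_language(language: str) -> str:
    return _LANGUAGE_ALIASES.get(language, language)


assert resolve_language('system') == 'unsupported'
assert resolve_language('script') == 'unsupported_script'
assert resolve_language('python') == 'python'
```
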
+ + +def test_gc_pre_1_14_roll_forward(store, cap_out): + with store.connect() as db: # simulate pre-1.14.0 + db.executescript('DROP TABLE configs') + + assert not gc(store) + assert cap_out.get() == '0 repo(s) removed.\n' diff --git a/tests/commands/hazmat_test.py b/tests/commands/hazmat_test.py new file mode 100644 index 00000000..df957e36 --- /dev/null +++ b/tests/commands/hazmat_test.py @@ -0,0 +1,99 @@ +from __future__ import annotations + +import sys + +import pytest + +from pre_commit.commands.hazmat import _cmd_filenames +from pre_commit.commands.hazmat import main +from testing.util import cwd + + +def test_cmd_filenames_no_dash_dash(): + with pytest.raises(SystemExit) as excinfo: + _cmd_filenames(('no', 'dashdash', 'here')) + msg, = excinfo.value.args + assert msg == 'hazmat entry must end with `--`' + + +def test_cmd_filenames_no_filenames(): + cmd, filenames = _cmd_filenames(('hello', 'world', '--')) + assert cmd == ('hello', 'world') + assert filenames == () + + +def test_cmd_filenames_some_filenames(): + cmd, filenames = _cmd_filenames(('hello', 'world', '--', 'f1', 'f2')) + assert cmd == ('hello', 'world') + assert filenames == ('f1', 'f2') + + +def test_cmd_filenames_multiple_dashdash(): + cmd, filenames = _cmd_filenames(('hello', '--', 'arg', '--', 'f1', 'f2')) + assert cmd == ('hello', '--', 'arg') + assert filenames == ('f1', 'f2') + + +def test_cd_unexpected_filename(): + with pytest.raises(SystemExit) as excinfo: + main(('cd', 'subdir', 'cmd', '--', 'subdir/1', 'not-subdir/2')) + msg, = excinfo.value.args + assert msg == "unexpected file without prefix='subdir/': not-subdir/2" + + +def _norm(out): + return out.replace('\r\n', '\n') + + +def test_cd(tmp_path, capfd): + subdir = tmp_path.joinpath('subdir') + subdir.mkdir() + subdir.joinpath('a').write_text('a') + subdir.joinpath('b').write_text('b') + + with cwd(tmp_path): + ret = main(( + 'cd', 'subdir', + sys.executable, '-c', + 'import os; print(os.getcwd());' + 'import sys; [print(open(f).read()) for f in sys.argv[1:]]', + '--', + 'subdir/a', 'subdir/b', + )) + + assert ret == 0 + out, err = capfd.readouterr() + assert _norm(out) == f'{subdir}\na\nb\n' + assert err == '' + + +def test_ignore_exit_code(capfd): + ret = main(( + 'ignore-exit-code', sys.executable, '-c', 'raise SystemExit("bye")', + )) + assert ret == 0 + out, err = capfd.readouterr() + assert out == '' + assert _norm(err) == 'bye\n' + + +def test_n1(capfd): + ret = main(( + 'n1', sys.executable, '-c', 'import sys; print(sys.argv[1:])', + '--', + 'foo', 'bar', 'baz', + )) + assert ret == 0 + out, err = capfd.readouterr() + assert _norm(out) == "['foo']\n['bar']\n['baz']\n" + assert err == '' + + +def test_n1_some_error_code(): + ret = main(( + 'n1', sys.executable, '-c', + 'import sys; raise SystemExit(sys.argv[1] == "error")', + '--', + 'ok', 'error', 'ok', + )) + assert ret == 1 diff --git a/tests/commands/migrate_config_test.py b/tests/commands/migrate_config_test.py index ba184636..a517d2f4 100644 --- a/tests/commands/migrate_config_test.py +++ b/tests/commands/migrate_config_test.py @@ -1,10 +1,26 @@ from __future__ import annotations +from unittest import mock + import pytest +import yaml import pre_commit.constants as C from pre_commit.clientlib import InvalidConfigError from pre_commit.commands.migrate_config import migrate_config +from pre_commit.yaml import yaml_compose + + +@pytest.fixture(autouse=True, params=['c', 'pure']) +def switch_pyyaml_impl(request): + if request.param == 'c': + yield + else: + with mock.patch.dict( + 
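
The new `hazmat` tests above exercise three tools -- `cd` (re-root a hook in a subdirectory and strip that prefix from the filenames), `ignore-exit-code`, and `n1` (invoke the command once per filename) -- and they also fix the entry format: everything before the *last* `--` is the command, everything after it is the filename batch appended by pre-commit. A minimal sketch of that split, consistent with the tests; the real parsing lives in `pre_commit/commands/hazmat.py` and may differ in details:

```python
from __future__ import annotations


def cmd_filenames(args: tuple[str, ...]) -> tuple[tuple[str, ...], tuple[str, ...]]:
    """Split a hazmat invocation at the last `--` separator."""
    for i in range(len(args) - 1, -1, -1):
        if args[i] == '--':
            return args[:i], args[i + 1:]
    raise SystemExit('hazmat entry must end with `--`')


# ('hello', '--', 'arg') is the command; ('f1', 'f2') are the filenames
print(cmd_filenames(('hello', '--', 'arg', '--', 'f1', 'f2')))
```
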
yaml_compose.keywords, + {'Loader': yaml.SafeLoader}, + ): + yield def test_migrate_config_normal_format(tmpdir, capsys): @@ -134,6 +150,27 @@ def test_migrate_config_sha_to_rev(tmpdir): ) +def test_migrate_config_sha_to_rev_json(tmp_path): + contents = """\ +{"repos": [{ + "repo": "https://github.com/pre-commit/pre-commit-hooks", + "sha": "v1.2.0", + "hooks": [] +}]} +""" + expected = """\ +{"repos": [{ + "repo": "https://github.com/pre-commit/pre-commit-hooks", + "rev": "v1.2.0", + "hooks": [] +}]} +""" + cfg = tmp_path.joinpath('cfg.yaml') + cfg.write_text(contents) + assert not migrate_config(str(cfg)) + assert cfg.read_text() == expected + + def test_migrate_config_language_python_venv(tmp_path): src = '''\ repos: @@ -167,6 +204,73 @@ repos: assert cfg.read_text() == expected +def test_migrate_config_quoted_python_venv(tmp_path): + src = '''\ +repos: +- repo: local + hooks: + - id: example + name: example + entry: example + language: "python_venv" +''' + expected = '''\ +repos: +- repo: local + hooks: + - id: example + name: example + entry: example + language: "python" +''' + cfg = tmp_path.joinpath('cfg.yaml') + cfg.write_text(src) + assert migrate_config(str(cfg)) == 0 + assert cfg.read_text() == expected + + +def test_migrate_config_default_stages(tmp_path): + src = '''\ +default_stages: [commit, push, merge-commit, commit-msg] +repos: [] +''' + expected = '''\ +default_stages: [pre-commit, pre-push, pre-merge-commit, commit-msg] +repos: [] +''' + cfg = tmp_path.joinpath('cfg.yaml') + cfg.write_text(src) + assert migrate_config(str(cfg)) == 0 + assert cfg.read_text() == expected + + +def test_migrate_config_hook_stages(tmp_path): + src = '''\ +repos: +- repo: local + hooks: + - id: example + name: example + entry: example + language: system + stages: ["commit", "push", "merge-commit", "commit-msg"] +''' + expected = '''\ +repos: +- repo: local + hooks: + - id: example + name: example + entry: example + language: system + stages: ["pre-commit", "pre-push", "pre-merge-commit", "commit-msg"] +''' + cfg = tmp_path.joinpath('cfg.yaml') + cfg.write_text(src) + assert migrate_config(str(cfg)) == 0 + assert cfg.read_text() == expected + + def test_migrate_config_invalid_yaml(tmpdir): contents = '[' cfg = tmpdir.join(C.CONFIG_FILE) diff --git a/tests/commands/run_test.py b/tests/commands/run_test.py index 50a20f37..e4af1e16 100644 --- a/tests/commands/run_test.py +++ b/tests/commands/run_test.py @@ -1104,6 +1104,19 @@ def test_fail_fast_not_prev_failures(cap_out, store, repo_with_failing_hook): assert printed.count(b'run me!') == 1 +def test_fail_fast_run_arg(cap_out, store, repo_with_failing_hook): + with modify_config() as config: + # More than one hook to demonstrate early exit + config['repos'][0]['hooks'] *= 2 + stage_a_file() + + ret, printed = _do_run( + cap_out, store, repo_with_failing_hook, run_opts(fail_fast=True), + ) + # it should have only run one hook due to the CLI flag + assert printed.count(b'Failing hook') == 1 + + def test_classifier_removes_dne(): classifier = Classifier(('this_file_does_not_exist',)) assert classifier.filenames == [] diff --git a/tests/conftest.py b/tests/conftest.py index bd4af9a5..8c9cd14d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,7 +2,6 @@ from __future__ import annotations import functools import io -import logging import os.path from unittest import mock @@ -203,12 +202,6 @@ def store(tempdir_factory): yield Store(os.path.join(tempdir_factory.get(), '.pre-commit')) -@pytest.fixture -def log_info_mock(): - with 
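
The migration tests above all rely on the same renaming of deprecated stage names. A quick reference sketch of that mapping, derived from the expected outputs in the tests rather than copied from the implementation:

```python
from __future__ import annotations

# deprecated stage name -> replacement (other names, e.g. commit-msg, are unchanged)
DEPRECATED_STAGES = {
    'commit': 'pre-commit',
    'push': 'pre-push',
    'merge-commit': 'pre-merge-commit',
}


def migrate_stages(stages: list[str]) -> list[str]:
    return [DEPRECATED_STAGES.get(stage, stage) for stage in stages]


assert migrate_stages(['commit', 'push', 'merge-commit', 'commit-msg']) == [
    'pre-commit', 'pre-push', 'pre-merge-commit', 'commit-msg',
]
```
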
mock.patch.object(logging.getLogger('pre_commit'), 'info') as mck: - yield mck - - class Fixture: def __init__(self, stream: io.BytesIO) -> None: self._stream = stream diff --git a/tests/git_test.py b/tests/git_test.py index 93f5a1c6..02b6ce3a 100644 --- a/tests/git_test.py +++ b/tests/git_test.py @@ -141,6 +141,15 @@ def test_get_conflicted_files_unstaged_files(in_merge_conflict): assert ret == {'conflict_file'} +def test_get_conflicted_files_with_file_named_head(in_merge_conflict): + resolve_conflict() + open('HEAD', 'w').close() + cmd_output('git', 'add', 'HEAD') + + ret = set(git.get_conflicted_files()) + assert ret == {'conflict_file', 'HEAD'} + + MERGE_MSG = b"Merge branch 'foo' into bar\n\nConflicts:\n\tconflict_file\n" OTHER_MERGE_MSG = MERGE_MSG + b'\tother_conflict_file\n' diff --git a/tests/lang_base_test.py b/tests/lang_base_test.py index da289aef..9fac83da 100644 --- a/tests/lang_base_test.py +++ b/tests/lang_base_test.py @@ -164,3 +164,15 @@ def test_basic_run_hook(tmp_path): assert ret == 0 out = out.replace(b'\r\n', b'\n') assert out == b'hi hello file file file\n' + + +def test_hook_cmd(): + assert lang_base.hook_cmd('echo hi', ()) == ('echo', 'hi') + + +def test_hook_cmd_hazmat(): + ret = lang_base.hook_cmd('pre-commit hazmat cd a echo -- b', ()) + assert ret == ( + sys.executable, '-m', 'pre_commit.commands.hazmat', + 'cd', 'a', 'echo', '--', 'b', + ) diff --git a/tests/languages/dart_test.py b/tests/languages/dart_test.py index 5bb5aa68..213d888e 100644 --- a/tests/languages/dart_test.py +++ b/tests/languages/dart_test.py @@ -10,7 +10,7 @@ from testing.language_helpers import run_language def test_dart(tmp_path): pubspec_yaml = '''\ environment: - sdk: '>=2.10.0 <3.0.0' + sdk: '>=2.12.0 <4.0.0' name: hello_world_dart diff --git a/tests/languages/docker_image_test.py b/tests/languages/docker_image_test.py index 4e3a8789..4f720600 100644 --- a/tests/languages/docker_image_test.py +++ b/tests/languages/docker_image_test.py @@ -1,10 +1,18 @@ from __future__ import annotations +import pytest + from pre_commit.languages import docker_image +from pre_commit.util import cmd_output_b from testing.language_helpers import run_language from testing.util import xfailif_windows +@pytest.fixture(autouse=True, scope='module') +def _ensure_image_available(): + cmd_output_b('docker', 'run', '--rm', 'ubuntu:22.04', 'echo') + + @xfailif_windows # pragma: win32 no cover def test_docker_image_hook_via_entrypoint(tmp_path): ret = run_language( diff --git a/tests/languages/docker_test.py b/tests/languages/docker_test.py index 836382a8..e269976f 100644 --- a/tests/languages/docker_test.py +++ b/tests/languages/docker_test.py @@ -14,40 +14,173 @@ from pre_commit.util import CalledProcessError from testing.language_helpers import run_language from testing.util import xfailif_windows -DOCKER_CGROUP_EXAMPLE = b'''\ -12:hugetlb:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 -11:blkio:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 -10:freezer:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 -9:cpu,cpuacct:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 -8:pids:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 -7:rdma:/ -6:net_cls,net_prio:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 -5:cpuset:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 -4:devices:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 
-3:memory:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 -2:perf_event:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 -1:name=systemd:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 -0::/system.slice/containerd.service +DOCKER_CGROUPS_V1_MOUNTINFO_EXAMPLE = b'''\ +759 717 0:52 / / rw,relatime master:300 - overlay overlay rw,lowerdir=/var/lib/docker/overlay2/l/PCPE5P5IVGM7CFCPJR353N3ONK:/var/lib/docker/overlay2/l/EQFSDHFAJ333VEMEJD4ZTRIZCB,upperdir=/var/lib/docker/overlay2/0d9f6bf186030d796505b87d6daa92297355e47641e283d3c09d83a7f221e462/diff,workdir=/var/lib/docker/overlay2/0d9f6bf186030d796505b87d6daa92297355e47641e283d3c09d83a7f221e462/work +760 759 0:58 / /proc rw,nosuid,nodev,noexec,relatime - proc proc rw +761 759 0:59 / /dev rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,inode64 +762 761 0:60 / /dev/pts rw,nosuid,noexec,relatime - devpts devpts rw,gid=5,mode=620,ptmxmode=666 +763 759 0:61 / /sys ro,nosuid,nodev,noexec,relatime - sysfs sysfs ro +764 763 0:62 / /sys/fs/cgroup rw,nosuid,nodev,noexec,relatime - tmpfs tmpfs rw,mode=755,inode64 +765 764 0:29 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/systemd ro,nosuid,nodev,noexec,relatime master:11 - cgroup cgroup rw,xattr,name=systemd +766 764 0:32 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/rdma ro,nosuid,nodev,noexec,relatime master:15 - cgroup cgroup rw,rdma +767 764 0:33 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/cpu,cpuacct ro,nosuid,nodev,noexec,relatime master:16 - cgroup cgroup rw,cpu,cpuacct +768 764 0:34 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/cpuset ro,nosuid,nodev,noexec,relatime master:17 - cgroup cgroup rw,cpuset +769 764 0:35 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/pids ro,nosuid,nodev,noexec,relatime master:18 - cgroup cgroup rw,pids +770 764 0:36 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/memory ro,nosuid,nodev,noexec,relatime master:19 - cgroup cgroup rw,memory +771 764 0:37 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/perf_event ro,nosuid,nodev,noexec,relatime master:20 - cgroup cgroup rw,perf_event +772 764 0:38 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/net_cls,net_prio ro,nosuid,nodev,noexec,relatime master:21 - cgroup cgroup rw,net_cls,net_prio +773 764 0:39 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/blkio ro,nosuid,nodev,noexec,relatime master:22 - cgroup cgroup rw,blkio +774 764 0:40 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/misc ro,nosuid,nodev,noexec,relatime master:23 - cgroup cgroup rw,misc +775 764 0:41 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/hugetlb ro,nosuid,nodev,noexec,relatime master:24 - cgroup cgroup rw,hugetlb +776 764 0:42 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/devices ro,nosuid,nodev,noexec,relatime master:25 - cgroup cgroup rw,devices +777 764 0:43 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/freezer ro,nosuid,nodev,noexec,relatime master:26 - cgroup cgroup rw,freezer +778 761 0:57 / /dev/mqueue rw,nosuid,nodev,noexec,relatime - mqueue 
mqueue rw +779 761 0:63 / /dev/shm rw,nosuid,nodev,noexec,relatime - tmpfs shm rw,size=65536k,inode64 +780 759 8:5 /var/lib/docker/containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/resolv.conf /etc/resolv.conf rw,relatime - ext4 /dev/sda5 rw,errors=remount-ro +781 759 8:5 /var/lib/docker/containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/hostname /etc/hostname rw,relatime - ext4 /dev/sda5 rw,errors=remount-ro +782 759 8:5 /var/lib/docker/containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/hosts /etc/hosts rw,relatime - ext4 /dev/sda5 rw,errors=remount-ro +718 761 0:60 /0 /dev/console rw,nosuid,noexec,relatime - devpts devpts rw,gid=5,mode=620,ptmxmode=666 +719 760 0:58 /bus /proc/bus ro,nosuid,nodev,noexec,relatime - proc proc rw +720 760 0:58 /fs /proc/fs ro,nosuid,nodev,noexec,relatime - proc proc rw +721 760 0:58 /irq /proc/irq ro,nosuid,nodev,noexec,relatime - proc proc rw +722 760 0:58 /sys /proc/sys ro,nosuid,nodev,noexec,relatime - proc proc rw +723 760 0:58 /sysrq-trigger /proc/sysrq-trigger ro,nosuid,nodev,noexec,relatime - proc proc rw +724 760 0:64 / /proc/asound ro,relatime - tmpfs tmpfs ro,inode64 +725 760 0:65 / /proc/acpi ro,relatime - tmpfs tmpfs ro,inode64 +726 760 0:59 /null /proc/kcore rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,inode64 +727 760 0:59 /null /proc/keys rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,inode64 +728 760 0:59 /null /proc/timer_list rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,inode64 +729 760 0:66 / /proc/scsi ro,relatime - tmpfs tmpfs ro,inode64 +730 763 0:67 / /sys/firmware ro,relatime - tmpfs tmpfs ro,inode64 +731 763 0:68 / /sys/devices/virtual/powercap ro,relatime - tmpfs tmpfs ro,inode64 +''' # noqa: E501 + +DOCKER_CGROUPS_V2_MOUNTINFO_EXAMPLE = b'''\ +721 386 0:45 / / rw,relatime master:218 - overlay overlay rw,lowerdir=/var/lib/docker/overlay2/l/QHZ7OM7P4AQD3XLG274ZPWAJCV:/var/lib/docker/overlay2/l/5RFG6SZWVGOG2NKEYXJDQCQYX5,upperdir=/var/lib/docker/overlay2/e4ad859fc5d4791932b9b976052f01fb0063e01de3cef916e40ae2121f6a166e/diff,workdir=/var/lib/docker/overlay2/e4ad859fc5d4791932b9b976052f01fb0063e01de3cef916e40ae2121f6a166e/work,nouserxattr +722 721 0:48 / /proc rw,nosuid,nodev,noexec,relatime - proc proc rw +723 721 0:50 / /dev rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,inode64 +724 723 0:51 / /dev/pts rw,nosuid,noexec,relatime - devpts devpts rw,gid=5,mode=620,ptmxmode=666 +725 721 0:52 / /sys ro,nosuid,nodev,noexec,relatime - sysfs sysfs ro +726 725 0:26 / /sys/fs/cgroup ro,nosuid,nodev,noexec,relatime - cgroup2 cgroup rw,nsdelegate,memory_recursiveprot +727 723 0:47 / /dev/mqueue rw,nosuid,nodev,noexec,relatime - mqueue mqueue rw +728 723 0:53 / /dev/shm rw,nosuid,nodev,noexec,relatime - tmpfs shm rw,size=65536k,inode64 +729 721 8:3 /var/lib/docker/containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/resolv.conf /etc/resolv.conf rw,relatime - ext4 /dev/sda3 rw,errors=remount-ro +730 721 8:3 /var/lib/docker/containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/hostname /etc/hostname rw,relatime - ext4 /dev/sda3 rw,errors=remount-ro +731 721 8:3 /var/lib/docker/containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/hosts /etc/hosts rw,relatime - ext4 /dev/sda3 rw,errors=remount-ro +387 723 0:51 /0 /dev/console rw,nosuid,noexec,relatime - devpts devpts rw,gid=5,mode=620,ptmxmode=666 +388 722 0:48 /bus /proc/bus ro,nosuid,nodev,noexec,relatime - proc proc rw +389 722 0:48 /fs /proc/fs 
ro,nosuid,nodev,noexec,relatime - proc proc rw +525 722 0:48 /irq /proc/irq ro,nosuid,nodev,noexec,relatime - proc proc rw +526 722 0:48 /sys /proc/sys ro,nosuid,nodev,noexec,relatime - proc proc rw +571 722 0:48 /sysrq-trigger /proc/sysrq-trigger ro,nosuid,nodev,noexec,relatime - proc proc rw +572 722 0:57 / /proc/asound ro,relatime - tmpfs tmpfs ro,inode64 +575 722 0:58 / /proc/acpi ro,relatime - tmpfs tmpfs ro,inode64 +576 722 0:50 /null /proc/kcore rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,inode64 +577 722 0:50 /null /proc/keys rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,inode64 +578 722 0:50 /null /proc/timer_list rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,inode64 +579 722 0:59 / /proc/scsi ro,relatime - tmpfs tmpfs ro,inode64 +580 725 0:60 / /sys/firmware ro,relatime - tmpfs tmpfs ro,inode64 +''' # noqa: E501 + +PODMAN_CGROUPS_V1_MOUNTINFO_EXAMPLE = b'''\ +1200 915 0:57 / / rw,relatime - overlay overlay rw,lowerdir=/home/asottile/.local/share/containers/storage/overlay/l/ZWAU3VY3ZHABQJRBUAFPBX7R5D,upperdir=/home/asottile/.local/share/containers/storage/overlay/72504ef163fda63838930450553b7306412ccad139a007626732b3dc43af5200/diff,workdir=/home/asottile/.local/share/containers/storage/overlay/72504ef163fda63838930450553b7306412ccad139a007626732b3dc43af5200/work,volatile,userxattr +1204 1200 0:62 / /proc rw,nosuid,nodev,noexec,relatime - proc proc rw +1205 1200 0:63 / /dev rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,uid=1000,gid=1000,inode64 +1206 1200 0:64 / /sys ro,nosuid,nodev,noexec,relatime - sysfs sysfs rw +1207 1205 0:65 / /dev/pts rw,nosuid,noexec,relatime - devpts devpts rw,gid=100004,mode=620,ptmxmode=666 +1208 1205 0:61 / /dev/mqueue rw,nosuid,nodev,noexec,relatime - mqueue mqueue rw +1209 1200 0:53 /containers/overlay-containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/userdata/.containerenv /run/.containerenv rw,nosuid,nodev,relatime - tmpfs tmpfs rw,size=814036k,mode=700,uid=1000,gid=1000,inode64 +1210 1200 0:53 /containers/overlay-containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/userdata/resolv.conf /etc/resolv.conf rw,nosuid,nodev,relatime - tmpfs tmpfs rw,size=814036k,mode=700,uid=1000,gid=1000,inode64 +1211 1200 0:53 /containers/overlay-containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/userdata/hosts /etc/hosts rw,nosuid,nodev,relatime - tmpfs tmpfs rw,size=814036k,mode=700,uid=1000,gid=1000,inode64 +1212 1205 0:56 / /dev/shm rw,relatime - tmpfs shm rw,size=64000k,uid=1000,gid=1000,inode64 +1213 1200 0:53 /containers/overlay-containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/userdata/hostname /etc/hostname rw,nosuid,nodev,relatime - tmpfs tmpfs rw,size=814036k,mode=700,uid=1000,gid=1000,inode64 +1214 1206 0:66 / /sys/fs/cgroup rw,nosuid,nodev,noexec,relatime - tmpfs cgroup rw,size=1024k,uid=1000,gid=1000,inode64 +1215 1214 0:43 / /sys/fs/cgroup/freezer ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,freezer +1216 1214 0:42 /user.slice /sys/fs/cgroup/devices ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,devices +1217 1214 0:41 / /sys/fs/cgroup/hugetlb ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,hugetlb +1218 1214 0:40 / /sys/fs/cgroup/misc ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,misc +1219 1214 0:39 / /sys/fs/cgroup/blkio ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,blkio +1220 1214 0:38 / /sys/fs/cgroup/net_cls,net_prio ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,net_cls,net_prio +1221 1214 0:37 / 
/sys/fs/cgroup/perf_event ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,perf_event +1222 1214 0:36 /user.slice/user-1000.slice/user@1000.service /sys/fs/cgroup/memory ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,memory +1223 1214 0:35 /user.slice/user-1000.slice/user@1000.service /sys/fs/cgroup/pids ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,pids +1224 1214 0:34 / /sys/fs/cgroup/cpuset ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,cpuset +1225 1214 0:33 / /sys/fs/cgroup/cpu,cpuacct ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,cpu,cpuacct +1226 1214 0:32 / /sys/fs/cgroup/rdma ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,rdma +1227 1214 0:29 /user.slice/user-1000.slice/user@1000.service/apps.slice/apps-org.gnome.Terminal.slice/vte-spawn-0c50448e-b395-4d76-8b92-379f16e5066f.scope /sys/fs/cgroup/systemd ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,xattr,name=systemd +1228 1205 0:5 /null /dev/null rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64 +1229 1205 0:5 /zero /dev/zero rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64 +1230 1205 0:5 /full /dev/full rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64 +1231 1205 0:5 /tty /dev/tty rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64 +1232 1205 0:5 /random /dev/random rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64 +1233 1205 0:5 /urandom /dev/urandom rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64 +1234 1204 0:67 / /proc/acpi ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64 +1235 1204 0:5 /null /proc/kcore rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64 +1236 1204 0:5 /null /proc/keys rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64 +1237 1204 0:5 /null /proc/timer_list rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64 +1238 1204 0:68 / /proc/scsi ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64 +1239 1206 0:69 / /sys/firmware ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64 +1240 1206 0:70 / /sys/dev/block ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64 +1241 1204 0:62 /asound /proc/asound ro,relatime - proc proc rw +1242 1204 0:62 /bus /proc/bus ro,relatime - proc proc rw +1243 1204 0:62 /fs /proc/fs ro,relatime - proc proc rw +1244 1204 0:62 /irq /proc/irq ro,relatime - proc proc rw +1245 1204 0:62 /sys /proc/sys ro,relatime - proc proc rw +1256 1204 0:62 /sysrq-trigger /proc/sysrq-trigger ro,relatime - proc proc rw +916 1205 0:65 /0 /dev/console rw,relatime - devpts devpts rw,gid=100004,mode=620,ptmxmode=666 +''' # noqa: E501 + +PODMAN_CGROUPS_V2_MOUNTINFO_EXAMPLE = b'''\ +685 690 0:63 /containers/overlay-containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/userdata/resolv.conf /etc/resolv.conf rw,nosuid,nodev,relatime - tmpfs tmpfs rw,size=1637624k,nr_inodes=409406,mode=700,uid=1000,gid=1000,inode64 +686 690 0:63 /containers/overlay-containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/userdata/hosts /etc/hosts rw,nosuid,nodev,relatime - tmpfs tmpfs rw,size=1637624k,nr_inodes=409406,mode=700,uid=1000,gid=1000,inode64 +687 692 0:50 / /dev/shm rw,nosuid,nodev,noexec,relatime - tmpfs shm 
rw,size=64000k,uid=1000,gid=1000,inode64 +688 690 0:63 /containers/overlay-containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/userdata/.containerenv /run/.containerenv rw,nosuid,nodev,relatime - tmpfs tmpfs rw,size=1637624k,nr_inodes=409406,mode=700,uid=1000,gid=1000,inode64 +689 690 0:63 /containers/overlay-containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/userdata/hostname /etc/hostname rw,nosuid,nodev,relatime - tmpfs tmpfs rw,size=1637624k,nr_inodes=409406,mode=700,uid=1000,gid=1000,inode64 +690 546 0:55 / / rw,relatime - overlay overlay rw,lowerdir=/home/asottile/.local/share/containers/storage/overlay/l/NPOHYOD3PI3YW6TQSGBOVOUSK6,upperdir=/home/asottile/.local/share/containers/storage/overlay/565c206fb79f876ffd5f069b8bd7a97fb5e47d5d07396b0c395a4ed6725d4a8e/diff,workdir=/home/asottile/.local/share/containers/storage/overlay/565c206fb79f876ffd5f069b8bd7a97fb5e47d5d07396b0c395a4ed6725d4a8e/work,redirect_dir=nofollow,uuid=on,volatile,userxattr +691 690 0:59 / /proc rw,nosuid,nodev,noexec,relatime - proc proc rw +692 690 0:61 / /dev rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,uid=1000,gid=1000,inode64 +693 690 0:62 / /sys ro,nosuid,nodev,noexec,relatime - sysfs sysfs rw +694 692 0:66 / /dev/pts rw,nosuid,noexec,relatime - devpts devpts rw,gid=100004,mode=620,ptmxmode=666 +695 692 0:58 / /dev/mqueue rw,nosuid,nodev,noexec,relatime - mqueue mqueue rw +696 693 0:28 / /sys/fs/cgroup ro,nosuid,nodev,noexec,relatime - cgroup2 cgroup2 rw,nsdelegate,memory_recursiveprot +698 692 0:6 /null /dev/null rw,nosuid,noexec,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64 +699 692 0:6 /zero /dev/zero rw,nosuid,noexec,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64 +700 692 0:6 /full /dev/full rw,nosuid,noexec,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64 +701 692 0:6 /tty /dev/tty rw,nosuid,noexec,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64 +702 692 0:6 /random /dev/random rw,nosuid,noexec,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64 +703 692 0:6 /urandom /dev/urandom rw,nosuid,noexec,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64 +704 691 0:67 / /proc/acpi ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64 +705 691 0:6 /null /proc/kcore ro,nosuid,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64 +706 691 0:6 /null /proc/keys ro,nosuid,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64 +707 691 0:6 /null /proc/latency_stats ro,nosuid,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64 +708 691 0:6 /null /proc/timer_list ro,nosuid,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64 +709 691 0:68 / /proc/scsi ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64 +710 693 0:69 / /sys/firmware ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64 +711 693 0:70 / /sys/dev/block ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64 +712 693 0:71 / /sys/devices/virtual/powercap ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64 +713 691 0:59 /asound /proc/asound ro,nosuid,nodev,noexec,relatime - proc proc rw +714 691 0:59 /bus /proc/bus ro,nosuid,nodev,noexec,relatime - proc proc rw +715 691 0:59 /fs /proc/fs ro,nosuid,nodev,noexec,relatime - proc proc rw +716 691 0:59 /irq /proc/irq ro,nosuid,nodev,noexec,relatime - proc proc 
rw +717 691 0:59 /sys /proc/sys ro,nosuid,nodev,noexec,relatime - proc proc rw +718 691 0:59 /sysrq-trigger /proc/sysrq-trigger ro,nosuid,nodev,noexec,relatime - proc proc rw +547 692 0:66 /0 /dev/console rw,relatime - devpts devpts rw,gid=100004,mode=620,ptmxmode=666 ''' # noqa: E501 # The ID should match the above cgroup example. CONTAINER_ID = 'c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7' # noqa: E501 -NON_DOCKER_CGROUP_EXAMPLE = b'''\ -12:perf_event:/ -11:hugetlb:/ -10:devices:/ -9:blkio:/ -8:rdma:/ -7:cpuset:/ -6:cpu,cpuacct:/ -5:freezer:/ -4:memory:/ -3:pids:/ -2:net_cls,net_prio:/ -1:name=systemd:/init.scope -0::/init.scope -''' +NON_DOCKER_MOUNTINFO_EXAMPLE = b'''\ +21 27 0:19 / /sys rw,nosuid,nodev,noexec,relatime shared:7 - sysfs sysfs rw +22 27 0:20 / /proc rw,nosuid,nodev,noexec,relatime shared:14 - proc proc rw +23 27 0:5 / /dev rw,nosuid,relatime shared:2 - devtmpfs udev rw,size=10219484k,nr_inodes=2554871,mode=755,inode64 +24 23 0:21 / /dev/pts rw,nosuid,noexec,relatime shared:3 - devpts devpts rw,gid=5,mode=620,ptmxmode=000 +25 27 0:22 / /run rw,nosuid,nodev,noexec,relatime shared:5 - tmpfs tmpfs rw,size=2047768k,mode=755,inode64 +27 1 8:2 / / rw,relatime shared:1 - ext4 /dev/sda2 rw,errors=remount-ro +28 21 0:6 / /sys/kernel/security rw,nosuid,nodev,noexec,relatime shared:8 - securityfs securityfs rw +29 23 0:24 / /dev/shm rw,nosuid,nodev shared:4 - tmpfs tmpfs rw,inode64 +30 25 0:25 / /run/lock rw,nosuid,nodev,noexec,relatime shared:6 - tmpfs tmpfs rw,size=5120k,inode64 +''' # noqa: E501 def test_docker_fallback_user(): @@ -62,9 +195,46 @@ def test_docker_fallback_user(): assert docker.get_docker_user() == () -def test_in_docker_no_file(): +@pytest.fixture(autouse=True) +def _avoid_cache(): + with mock.patch.object( + docker, + '_is_rootless', + docker._is_rootless.__wrapped__, + ): + yield + + +@pytest.mark.parametrize( + 'info_ret', + ( + (0, b'{"SecurityOptions": ["name=rootless","name=cgroupns"]}', b''), + (0, b'{"host": {"security": {"rootless": true}}}', b''), + ), +) +def test_docker_user_rootless(info_ret): + with mock.patch.object(docker, 'cmd_output_b', return_value=info_ret): + assert docker.get_docker_user() == () + + +@pytest.mark.parametrize( + 'info_ret', + ( + (0, b'{"SecurityOptions": ["name=cgroupns"]}', b''), + (0, b'{"host": {"security": {"rootless": false}}}', b''), + (0, b'{"response_from_some_other_container_engine": true}', b''), + (0, b'{"SecurityOptions": null}', b''), + (1, b'', b''), + ), +) +def test_docker_user_non_rootless(info_ret): + with mock.patch.object(docker, 'cmd_output_b', return_value=info_ret): + assert docker.get_docker_user() != () + + +def test_container_id_no_file(): with mock.patch.object(builtins, 'open', side_effect=FileNotFoundError): - assert docker._is_in_docker() is False + assert docker._get_container_id() is None def _mock_open(data): @@ -76,38 +246,33 @@ def _mock_open(data): ) -def test_in_docker_docker_in_file(): - with _mock_open(DOCKER_CGROUP_EXAMPLE): - assert docker._is_in_docker() is True - - -def test_in_docker_docker_not_in_file(): - with _mock_open(NON_DOCKER_CGROUP_EXAMPLE): - assert docker._is_in_docker() is False +def test_container_id_not_in_file(): + with _mock_open(NON_DOCKER_MOUNTINFO_EXAMPLE): + assert docker._get_container_id() is None def test_get_container_id(): - with _mock_open(DOCKER_CGROUP_EXAMPLE): + with _mock_open(DOCKER_CGROUPS_V1_MOUNTINFO_EXAMPLE): + assert docker._get_container_id() == CONTAINER_ID + with _mock_open(DOCKER_CGROUPS_V2_MOUNTINFO_EXAMPLE): + assert 
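
The rootless tests above mock the JSON returned by a `docker info`-style query: Docker reports `"name=rootless"` inside `SecurityOptions` (which may also be `null`), while Podman reports `host.security.rootless`. A sketch of the decision those fixtures imply -- the exact CLI invocation pre-commit uses is not shown in this diff and is assumed here:

```python
from __future__ import annotations

import json


def is_rootless(info_stdout: bytes) -> bool:
    """Interpret `docker info` / `podman info` JSON output."""
    try:
        info = json.loads(info_stdout)
    except json.JSONDecodeError:
        return False
    # docker: {"SecurityOptions": ["name=rootless", ...]} (or null)
    if 'name=rootless' in (info.get('SecurityOptions') or ()):
        return True
    # podman: {"host": {"security": {"rootless": true}}}
    return bool(info.get('host', {}).get('security', {}).get('rootless'))


# rootless engines run hooks without `-u uid:gid`; rootful ones keep it
assert is_rootless(b'{"SecurityOptions": ["name=rootless","name=cgroupns"]}')
assert not is_rootless(b'{"SecurityOptions": null}')
```
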
docker._get_container_id() == CONTAINER_ID + with _mock_open(PODMAN_CGROUPS_V1_MOUNTINFO_EXAMPLE): + assert docker._get_container_id() == CONTAINER_ID + with _mock_open(PODMAN_CGROUPS_V2_MOUNTINFO_EXAMPLE): assert docker._get_container_id() == CONTAINER_ID -def test_get_container_id_failure(): - with _mock_open(b''), pytest.raises(RuntimeError): - docker._get_container_id() - - def test_get_docker_path_not_in_docker_returns_same(): - with mock.patch.object(docker, '_is_in_docker', return_value=False): + with _mock_open(b''): assert docker._get_docker_path('abc') == 'abc' @pytest.fixture def in_docker(): - with mock.patch.object(docker, '_is_in_docker', return_value=True): - with mock.patch.object( - docker, '_get_container_id', return_value=CONTAINER_ID, - ): - yield + with mock.patch.object( + docker, '_get_container_id', return_value=CONTAINER_ID, + ): + yield def _linux_commonpath(): @@ -195,3 +360,14 @@ CMD ["echo", "This is overwritten by the entry"'] ret = run_language(tmp_path, docker, 'echo hello hello world') assert ret == (0, b'hello hello world\n') + + +@xfailif_windows # pragma: win32 no cover +def test_docker_hook_mount_permissions(tmp_path): + dockerfile = '''\ +FROM ubuntu:22.04 +''' + tmp_path.joinpath('Dockerfile').write_text(dockerfile) + + retcode, _ = run_language(tmp_path, docker, 'touch', ('README.md',)) + assert retcode == 0 diff --git a/tests/languages/dotnet_test.py b/tests/languages/dotnet_test.py index 470c03b2..ee408256 100644 --- a/tests/languages/dotnet_test.py +++ b/tests/languages/dotnet_test.py @@ -27,7 +27,7 @@ def _csproj(tool_name): Exe - net6 + net8 true {tool_name} ./nupkg diff --git a/tests/languages/golang_test.py b/tests/languages/golang_test.py index 02e35d71..7fb6ab18 100644 --- a/tests/languages/golang_test.py +++ b/tests/languages/golang_test.py @@ -11,11 +11,13 @@ from pre_commit.commands.install_uninstall import install from pre_commit.envcontext import envcontext from pre_commit.languages import golang from pre_commit.store import _make_local_repo +from pre_commit.util import CalledProcessError from pre_commit.util import cmd_output from testing.fixtures import add_config_to_repo from testing.fixtures import make_config_from_repo from testing.language_helpers import run_language from testing.util import cmd_output_mocked_pre_commit_home +from testing.util import cwd from testing.util import git_commit @@ -165,3 +167,70 @@ def test_during_commit_all(tmp_path, tempdir_factory, store, in_git_dir): fn=cmd_output_mocked_pre_commit_home, tempdir_factory=tempdir_factory, ) + + +def test_automatic_toolchain_switching(tmp_path): + go_mod = '''\ +module toolchain-version-test + +go 1.23.1 +''' + main_go = '''\ +package main + +func main() {} +''' + tmp_path.joinpath('go.mod').write_text(go_mod) + mod_dir = tmp_path.joinpath('toolchain-version-test') + mod_dir.mkdir() + main_file = mod_dir.joinpath('main.go') + main_file.write_text(main_go) + + with pytest.raises(CalledProcessError) as excinfo: + run_language( + path=tmp_path, + language=golang, + version='1.22.0', + exe='golang-version-test', + ) + + assert 'go.mod requires go >= 1.23.1' in excinfo.value.stderr.decode() + + +def test_automatic_toolchain_switching_go_fmt(tmp_path, monkeypatch): + go_mod_hook = '''\ +module toolchain-version-test + +go 1.22.0 +''' + go_mod = '''\ +module toolchain-version-test + +go 1.23.1 +''' + main_go = '''\ +package main + +func main() {} +''' + hook_dir = tmp_path.joinpath('hook') + hook_dir.mkdir() + hook_dir.joinpath('go.mod').write_text(go_mod_hook) + + test_dir 
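
Container detection now reads mountinfo-style data instead of the old cgroup file: in all four container fixtures above the container id appears as a 64-character hex component in mount sources such as the `/etc/hostname` bind mount (`.../containers/<id>/...` for Docker, `.../overlay-containers/<id>/userdata/...` for Podman), and no such component appears in the non-container fixture. A sketch of one way to pull the id out of such data; the real `_get_container_id` may parse the file differently:

```python
from __future__ import annotations

import re

_CONTAINER_ID_RE = re.compile(r'containers/([0-9a-f]{64})/')


def container_id_from_mountinfo(contents: bytes) -> str | None:
    """Return the container id embedded in mountinfo data, if any."""
    for line in contents.decode().splitlines():
        found = _CONTAINER_ID_RE.search(line)
        if found:
            return found.group(1)
    return None
```
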
= tmp_path.joinpath('test') + test_dir.mkdir() + test_dir.joinpath('go.mod').write_text(go_mod) + main_file = test_dir.joinpath('main.go') + main_file.write_text(main_go) + + with cwd(test_dir): + ret, out = run_language( + path=hook_dir, + language=golang, + version='1.22.0', + exe='go fmt', + file_args=(str(main_file),), + ) + + assert ret == 1 + assert 'go.mod requires go >= 1.23.1' in out.decode() diff --git a/tests/languages/julia_test.py b/tests/languages/julia_test.py new file mode 100644 index 00000000..175622d6 --- /dev/null +++ b/tests/languages/julia_test.py @@ -0,0 +1,111 @@ +from __future__ import annotations + +import os +from unittest import mock + +from pre_commit.languages import julia +from testing.language_helpers import run_language +from testing.util import cwd + + +def _make_hook(tmp_path, julia_code): + src_dir = tmp_path.joinpath('src') + src_dir.mkdir() + src_dir.joinpath('main.jl').write_text(julia_code) + tmp_path.joinpath('Project.toml').write_text( + '[deps]\n' + 'Example = "7876af07-990d-54b4-ab0e-23690620f79a"\n', + ) + + +def test_julia_hook(tmp_path): + code = """ + using Example + function main() + println("Hello, world!") + end + main() + """ + _make_hook(tmp_path, code) + expected = (0, b'Hello, world!\n') + assert run_language(tmp_path, julia, 'src/main.jl') == expected + + +def test_julia_hook_with_startup(tmp_path): + depot_path = tmp_path.joinpath('depot') + depot_path.joinpath('config').mkdir(parents=True) + startup = depot_path.joinpath('config', 'startup.jl') + startup.write_text('error("Startup file used!")\n') + + depo_path_var = f'{depot_path}{os.pathsep}' + with mock.patch.dict(os.environ, {'JULIA_DEPOT_PATH': depo_path_var}): + test_julia_hook(tmp_path) + + +def test_julia_hook_manifest(tmp_path): + code = """ + using Example + println(pkgversion(Example)) + """ + _make_hook(tmp_path, code) + + tmp_path.joinpath('Manifest.toml').write_text( + 'manifest_format = "2.0"\n\n' + '[[deps.Example]]\n' + 'git-tree-sha1 = "11820aa9c229fd3833d4bd69e5e75ef4e7273bf1"\n' + 'uuid = "7876af07-990d-54b4-ab0e-23690620f79a"\n' + 'version = "0.5.4"\n', + ) + expected = (0, b'0.5.4\n') + assert run_language(tmp_path, julia, 'src/main.jl') == expected + + +def test_julia_hook_args(tmp_path): + code = """ + function main(argv) + foreach(println, argv) + end + main(ARGS) + """ + _make_hook(tmp_path, code) + expected = (0, b'--arg1\n--arg2\n') + assert run_language( + tmp_path, julia, 'src/main.jl --arg1 --arg2', + ) == expected + + +def test_julia_hook_additional_deps(tmp_path): + code = """ + using TOML + function main() + project_file = Base.active_project() + dict = TOML.parsefile(project_file) + for (k, v) in dict["deps"] + println(k, " = ", v) + end + end + main() + """ + _make_hook(tmp_path, code) + deps = ('TOML=fa267f1f-6049-4f14-aa54-33bafae1ed76',) + ret, out = run_language(tmp_path, julia, 'src/main.jl', deps=deps) + assert ret == 0 + assert b'Example = 7876af07-990d-54b4-ab0e-23690620f79a' in out + assert b'TOML = fa267f1f-6049-4f14-aa54-33bafae1ed76' in out + + +def test_julia_repo_local(tmp_path): + env_dir = tmp_path.joinpath('envdir') + env_dir.mkdir() + local_dir = tmp_path.joinpath('local') + local_dir.mkdir() + local_dir.joinpath('local.jl').write_text( + 'using TOML; foreach(println, ARGS)', + ) + with cwd(local_dir): + deps = ('TOML=fa267f1f-6049-4f14-aa54-33bafae1ed76',) + expected = (0, b'--local-arg1\n--local-arg2\n') + assert run_language( + env_dir, julia, 'local.jl --local-arg1 --local-arg2', + deps=deps, is_local=True, + ) == expected 
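
The two golang tests above only pass if the hook environment is built with exactly the requested toolchain: with Go's automatic toolchain switching left enabled, `go` would silently download 1.23.1 instead of failing with `go.mod requires go >= 1.23.1`. A sketch of the kind of environment pin that produces that behaviour; whether pre-commit sets precisely this variable is an assumption here:

```python
from __future__ import annotations

import os
import subprocess


def run_go_without_toolchain_switching(args: list[str], cwd: str) -> None:
    # GOTOOLCHAIN=local tells go never to auto-download a newer toolchain;
    # a go.mod requiring a newer version then fails with
    # "go.mod requires go >= X" instead of fetching that toolchain.
    env = dict(os.environ, GOTOOLCHAIN='local')
    subprocess.run(['go', *args], cwd=cwd, env=env, check=True)
```
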
diff --git a/tests/languages/python_test.py b/tests/languages/python_test.py index ab26e14e..593634b7 100644 --- a/tests/languages/python_test.py +++ b/tests/languages/python_test.py @@ -10,8 +10,11 @@ import pre_commit.constants as C from pre_commit.envcontext import envcontext from pre_commit.languages import python from pre_commit.prefix import Prefix +from pre_commit.store import _make_local_repo +from pre_commit.util import cmd_output_b from pre_commit.util import make_executable from pre_commit.util import win_exe +from testing.auto_namedtuple import auto_namedtuple from testing.language_helpers import run_language @@ -34,6 +37,72 @@ def test_read_pyvenv_cfg_non_utf8(tmpdir): assert python._read_pyvenv_cfg(pyvenv_cfg) == expected +def _get_default_version( + *, + impl: str, + exe: str, + found: set[str], + version: tuple[int, int], +) -> str: + sys_exe = f'/fake/path/{exe}' + sys_impl = auto_namedtuple(name=impl) + sys_ver = auto_namedtuple(major=version[0], minor=version[1]) + + def find_exe(s): + if s in found: + return f'/fake/path/found/{exe}' + else: + return None + + with ( + mock.patch.object(sys, 'implementation', sys_impl), + mock.patch.object(sys, 'executable', sys_exe), + mock.patch.object(sys, 'version_info', sys_ver), + mock.patch.object(python, 'find_executable', find_exe), + ): + return python.get_default_version.__wrapped__() + + +def test_default_version_sys_executable_found(): + ret = _get_default_version( + impl='cpython', + exe='python3.12', + found={'python3.12'}, + version=(3, 12), + ) + assert ret == 'python3.12' + + +def test_default_version_picks_specific_when_found(): + ret = _get_default_version( + impl='cpython', + exe='python3', + found={'python3', 'python3.12'}, + version=(3, 12), + ) + assert ret == 'python3.12' + + +def test_default_version_picks_pypy_versioned_exe(): + ret = _get_default_version( + impl='pypy', + exe='python', + found={'pypy3.12', 'python3'}, + version=(3, 12), + ) + assert ret == 'pypy3.12' + + +def test_default_version_picks_pypy_unversioned_exe(): + ret = _get_default_version( + impl='pypy', + exe='python', + found={'pypy3', 'python3'}, + version=(3, 12), + ) + assert ret == 'pypy3' + + def test_norm_version_expanduser(): home = os.path.expanduser('~') if sys.platform == 'win32': # pragma: win32 cover @@ -284,3 +353,15 @@ def test_python_hook_weird_setup_cfg(tmp_path): ret = run_language(tmp_path, python, 'socks', [os.devnull]) assert ret == (0, f'[{os.devnull!r}]\nhello hello\n'.encode()) + + +def test_local_repo_with_other_artifacts(tmp_path): + cmd_output_b('git', 'init', tmp_path) + _make_local_repo(str(tmp_path)) + # pretend a rust install also ran here + tmp_path.joinpath('target').mkdir() + + ret, out = run_language(tmp_path, python, 'python --version') + + assert ret == 0 + assert out.startswith(b'Python ') diff --git a/tests/languages/r_test.py b/tests/languages/r_test.py index 10919e4a..9e73129e 100644 --- a/tests/languages/r_test.py +++ b/tests/languages/r_test.py @@ -286,7 +286,7 @@ def test_health_check_without_version(prefix, installed_environment, version): prefix, env_dir = installed_environment # simulate old pre-commit install by unsetting the installed version - r._execute_vanilla_r_code_as_script( + r._execute_r_in_renv( f'renv::settings$r.version({version})', prefix=prefix, version=C.DEFAULT, cwd=env_dir, ) diff --git a/tests/languages/system_test.py b/tests/languages/system_test.py deleted file mode 100644 index dcd9cf1e..00000000 --- a/tests/languages/system_test.py +++ /dev/null @@ -1,9 +0,0 @@ -from 
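
The `_get_default_version` tests above describe the lookup order for `language: python`'s default version: prefer an implementation-specific, fully versioned executable on `PATH` (`python3.12`, or `pypy3.12` / `pypy3` for PyPy) and only then fall back to the interpreter running pre-commit. A condensed sketch of that ordering, consistent with the expected values in the tests; the real function in `pre_commit/languages/python.py` handles more cases and uses its own executable lookup rather than `shutil.which`:

```python
from __future__ import annotations

import os.path
import sys
from shutil import which  # stand-in for pre-commit's own executable lookup


def default_python_version() -> str:
    major, minor = sys.version_info[:2]
    if sys.implementation.name == 'cpython':
        candidates = (f'python{major}.{minor}',)
    else:  # e.g. pypy: prefer the versioned exe, then the bare one
        candidates = (
            f'{sys.implementation.name}{major}.{minor}',
            f'{sys.implementation.name}{major}',
        )
    for candidate in candidates:
        if which(candidate):
            return candidate
    # fall back to the interpreter currently running pre-commit
    return os.path.basename(sys.executable)
```
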
__future__ import annotations - -from pre_commit.languages import system -from testing.language_helpers import run_language - - -def test_system_language(tmp_path): - expected = (0, b'hello hello world\n') - assert run_language(tmp_path, system, 'echo hello hello world') == expected diff --git a/tests/languages/script_test.py b/tests/languages/unsupported_script_test.py similarity index 63% rename from tests/languages/script_test.py rename to tests/languages/unsupported_script_test.py index a02f615a..b15b67e7 100644 --- a/tests/languages/script_test.py +++ b/tests/languages/unsupported_script_test.py @@ -1,14 +1,14 @@ from __future__ import annotations -from pre_commit.languages import script +from pre_commit.languages import unsupported_script from pre_commit.util import make_executable from testing.language_helpers import run_language -def test_script_language(tmp_path): +def test_unsupported_script_language(tmp_path): exe = tmp_path.joinpath('main') exe.write_text('#!/usr/bin/env bash\necho hello hello world\n') make_executable(exe) expected = (0, b'hello hello world\n') - assert run_language(tmp_path, script, 'main') == expected + assert run_language(tmp_path, unsupported_script, 'main') == expected diff --git a/tests/languages/unsupported_test.py b/tests/languages/unsupported_test.py new file mode 100644 index 00000000..7f8461e0 --- /dev/null +++ b/tests/languages/unsupported_test.py @@ -0,0 +1,10 @@ +from __future__ import annotations + +from pre_commit.languages import unsupported +from testing.language_helpers import run_language + + +def test_unsupported_language(tmp_path): + expected = (0, b'hello hello world\n') + ret = run_language(tmp_path, unsupported, 'echo hello hello world') + assert ret == expected diff --git a/tests/main_test.py b/tests/main_test.py index 945349fa..fed085fc 100644 --- a/tests/main_test.py +++ b/tests/main_test.py @@ -1,6 +1,7 @@ from __future__ import annotations import argparse +import contextlib import os.path from unittest import mock @@ -8,6 +9,7 @@ import pytest import pre_commit.constants as C from pre_commit import main +from pre_commit.commands import hazmat from pre_commit.errors import FatalError from pre_commit.util import cmd_output from testing.auto_namedtuple import auto_namedtuple @@ -97,11 +99,9 @@ CMDS = tuple(fn.replace('_', '-') for fn in FNS) @pytest.fixture def mock_commands(): - mcks = {fn: mock.patch.object(main, fn).start() for fn in FNS} - ret = auto_namedtuple(**mcks) - yield ret - for mck in ret: - mck.stop() + with contextlib.ExitStack() as ctx: + mcks = {f: ctx.enter_context(mock.patch.object(main, f)) for f in FNS} + yield auto_namedtuple(**mcks) @pytest.fixture @@ -158,6 +158,17 @@ def test_all_cmds(command, mock_commands, mock_store_dir): assert_only_one_mock_called(mock_commands) +def test_hazmat(mock_store_dir): + with mock.patch.object(hazmat, 'impl') as mck: + main.main(('hazmat', 'cd', 'subdir', '--', 'cmd', '--', 'f1', 'f2')) + assert mck.call_count == 1 + (arg,), dct = mck.call_args + assert dct == {} + assert arg.tool == 'cd' + assert arg.subdir == 'subdir' + assert arg.cmd == ['cmd', '--', 'f1', 'f2'] + + def test_try_repo(mock_store_dir): with mock.patch.object(main, 'try_repo') as patch: main.main(('try-repo', '.')) diff --git a/tests/repository_test.py b/tests/repository_test.py index ac065ec4..5d71c3e4 100644 --- a/tests/repository_test.py +++ b/tests/repository_test.py @@ -17,7 +17,7 @@ from pre_commit.clientlib import CONFIG_SCHEMA from pre_commit.clientlib import load_manifest from pre_commit.hook import 
Hook from pre_commit.languages import python -from pre_commit.languages import system +from pre_commit.languages import unsupported from pre_commit.prefix import Prefix from pre_commit.repository import _hook_installed from pre_commit.repository import all_hooks @@ -80,31 +80,6 @@ def _test_hook_repo( assert out == expected -def test_python_venv_deprecation(store, caplog): - config = { - 'repo': 'local', - 'hooks': [{ - 'id': 'example', - 'name': 'example', - 'language': 'python_venv', - 'entry': 'echo hi', - }], - } - _get_hook(config, store, 'example') - assert caplog.messages[-1] == ( - '`repo: local` uses deprecated `language: python_venv`. ' - 'This is an alias for `language: python`. ' - 'Often `pre-commit autoupdate --repo local` will fix this.' - ) - - -def test_system_hook_with_spaces(tempdir_factory, store): - _test_hook_repo( - tempdir_factory, store, 'system_hook_with_spaces_repo', - 'system-hook-with-spaces', [os.devnull], b'Hello World\n', - ) - - def test_missing_executable(tempdir_factory, store): _test_hook_repo( tempdir_factory, store, 'not_found_exe', @@ -240,16 +215,16 @@ def test_unknown_keys(store, caplog): assert msg == 'Unexpected key(s) present on local => too-much: foo, hello' -def test_reinstall(tempdir_factory, store, log_info_mock): +def test_reinstall(tempdir_factory, store, caplog): path = make_repo(tempdir_factory, 'python_hooks_repo') config = make_config_from_repo(path) _get_hook(config, store, 'foo') # We print some logging during clone (1) + install (3) - assert log_info_mock.call_count == 4 - log_info_mock.reset_mock() + assert len(caplog.record_tuples) == 4 + caplog.clear() # Reinstall on another run should not trigger another install _get_hook(config, store, 'foo') - assert log_info_mock.call_count == 0 + assert len(caplog.record_tuples) == 0 def test_control_c_control_c_on_install(tempdir_factory, store): @@ -449,7 +424,7 @@ def test_manifest_hooks(tempdir_factory, store): exclude_types=[], files='', id='bash_hook', - language='script', + language='unsupported_script', language_version='default', log_file='', minimum_pre_commit_version='0', @@ -482,7 +457,7 @@ def test_non_installable_hook_error_for_language_version(store, caplog): 'hooks': [{ 'id': 'system-hook', 'name': 'system-hook', - 'language': 'system', + 'language': 'unsupported', 'entry': 'python3 -c "import sys; print(sys.version)"', 'language_version': 'python3.10', }], @@ -494,7 +469,7 @@ def test_non_installable_hook_error_for_language_version(store, caplog): msg, = caplog.messages assert msg == ( 'The hook `system-hook` specifies `language_version` but is using ' - 'language `system` which does not install an environment. ' + 'language `unsupported` which does not install an environment. ' 'Perhaps you meant to use a specific language?' ) @@ -505,7 +480,7 @@ def test_non_installable_hook_error_for_additional_dependencies(store, caplog): 'hooks': [{ 'id': 'system-hook', 'name': 'system-hook', - 'language': 'system', + 'language': 'unsupported', 'entry': 'python3 -c "import sys; print(sys.version)"', 'additional_dependencies': ['astpretty'], }], @@ -517,17 +492,28 @@ def test_non_installable_hook_error_for_additional_dependencies(store, caplog): msg, = caplog.messages assert msg == ( 'The hook `system-hook` specifies `additional_dependencies` but is ' - 'using language `system` which does not install an environment. ' + 'using language `unsupported` which does not install an environment. ' 'Perhaps you meant to use a specific language?' 
) def test_args_with_spaces_and_quotes(tmp_path): ret = run_language( - tmp_path, system, + tmp_path, unsupported, f"{shlex.quote(sys.executable)} -c 'import sys; print(sys.argv[1:])'", ('i have spaces', 'and"\'quotes', '$and !this'), ) expected = b"['i have spaces', 'and\"\\'quotes', '$and !this']\n" assert ret == (0, expected) + + +def test_hazmat(tmp_path): + ret = run_language( + tmp_path, unsupported, + f'pre-commit hazmat ignore-exit-code {shlex.quote(sys.executable)} ' + f"-c 'import sys; raise SystemExit(sys.argv[1:])'", + ('f1', 'f2'), + ) + expected = b"['f1', 'f2']\n" + assert ret == (0, expected) diff --git a/tests/store_test.py b/tests/store_test.py index 45ec7327..13f198ea 100644 --- a/tests/store_test.py +++ b/tests/store_test.py @@ -1,12 +1,15 @@ from __future__ import annotations +import logging import os.path +import shlex import sqlite3 import stat from unittest import mock import pytest +import pre_commit.constants as C from pre_commit import git from pre_commit.store import _get_default_directory from pre_commit.store import _LOCAL_RESOURCES @@ -19,6 +22,17 @@ from testing.util import git_commit from testing.util import xfailif_windows +def _select_all_configs(store: Store) -> list[str]: + with store.connect() as db: + rows = db.execute('SELECT * FROM configs').fetchall() + return [path for path, in rows] + + +def _select_all_repos(store: Store) -> list[tuple[str, str, str]]: + with store.connect() as db: + return db.execute('SELECT repo, ref, path FROM repos').fetchall() + + def test_our_session_fixture_works(): """There's a session fixture which makes `Store` invariantly raise to prevent writing to the home directory. @@ -65,7 +79,7 @@ def test_store_init(store): assert text_line in readme_contents -def test_clone(store, tempdir_factory, log_info_mock): +def test_clone(store, tempdir_factory, caplog): path = git_dir(tempdir_factory) with cwd(path): git_commit() @@ -74,7 +88,7 @@ def test_clone(store, tempdir_factory, log_info_mock): ret = store.clone(path, rev) # Should have printed some stuff - assert log_info_mock.call_args_list[0][0][0].startswith( + assert caplog.record_tuples[0][-1].startswith( 'Initializing environment for ', ) @@ -88,7 +102,73 @@ def test_clone(store, tempdir_factory, log_info_mock): assert git.head_rev(ret) == rev # Assert there's an entry in the sqlite db for this - assert store.select_all_repos() == [(path, rev, ret)] + assert _select_all_repos(store) == [(path, rev, ret)] + + +def test_warning_for_deprecated_stages_on_init(store, tempdir_factory, caplog): + manifest = '''\ +- id: hook1 + name: hook1 + language: system + entry: echo hook1 + stages: [commit, push] +- id: hook2 + name: hook2 + language: system + entry: echo hook2 + stages: [push, merge-commit] +''' + + path = git_dir(tempdir_factory) + with open(os.path.join(path, C.MANIFEST_FILE), 'w') as f: + f.write(manifest) + cmd_output('git', 'add', '.', cwd=path) + git_commit(cwd=path) + rev = git.head_rev(path) + + store.clone(path, rev) + assert caplog.record_tuples[1] == ( + 'pre_commit', + logging.WARNING, + f'repo `{path}` uses deprecated stage names ' + f'(commit, push, merge-commit) which will be removed in a future ' + f'version. ' + f'Hint: often `pre-commit autoupdate --repo {shlex.quote(path)}` ' + f'will fix this. 
' + f'if it does not -- consider reporting an issue to that repo.', + ) + + # should not re-warn + caplog.clear() + store.clone(path, rev) + assert caplog.record_tuples == [] + + +def test_no_warning_for_non_deprecated_stages_on_init( + store, tempdir_factory, caplog, +): + manifest = '''\ +- id: hook1 + name: hook1 + language: system + entry: echo hook1 + stages: [pre-commit, pre-push] +- id: hook2 + name: hook2 + language: system + entry: echo hook2 + stages: [pre-push, pre-merge-commit] +''' + + path = git_dir(tempdir_factory) + with open(os.path.join(path, C.MANIFEST_FILE), 'w') as f: + f.write(manifest) + cmd_output('git', 'add', '.', cwd=path) + git_commit(cwd=path) + rev = git.head_rev(path) + + store.clone(path, rev) + assert logging.WARNING not in {tup[1] for tup in caplog.record_tuples} def test_clone_cleans_up_on_checkout_failure(store): @@ -118,7 +198,7 @@ def test_clone_when_repo_already_exists(store): def test_clone_shallow_failure_fallback_to_complete( store, tempdir_factory, - log_info_mock, + caplog, ): path = git_dir(tempdir_factory) with cwd(path): @@ -134,7 +214,7 @@ def test_clone_shallow_failure_fallback_to_complete( ret = store.clone(path, rev) # Should have printed some stuff - assert log_info_mock.call_args_list[0][0][0].startswith( + assert caplog.record_tuples[0][-1].startswith( 'Initializing environment for ', ) @@ -148,7 +228,7 @@ def test_clone_shallow_failure_fallback_to_complete( assert git.head_rev(ret) == rev # Assert there's an entry in the sqlite db for this - assert store.select_all_repos() == [(path, rev, ret)] + assert _select_all_repos(store) == [(path, rev, ret)] def test_clone_tag_not_on_mainline(store, tempdir_factory): @@ -196,7 +276,7 @@ def test_mark_config_as_used(store, tmpdir): with tmpdir.as_cwd(): f = tmpdir.join('f').ensure() store.mark_config_used('f') - assert store.select_all_configs() == [f.strpath] + assert _select_all_configs(store) == [f.strpath] def test_mark_config_as_used_idempotent(store, tmpdir): @@ -206,21 +286,12 @@ def test_mark_config_as_used_idempotent(store, tmpdir): def test_mark_config_as_used_does_not_exist(store): store.mark_config_used('f') - assert store.select_all_configs() == [] - - -def _simulate_pre_1_14_0(store): - with store.connect() as db: - db.executescript('DROP TABLE configs') - - -def test_select_all_configs_roll_forward(store): - _simulate_pre_1_14_0(store) - assert store.select_all_configs() == [] + assert _select_all_configs(store) == [] def test_mark_config_as_used_roll_forward(store, tmpdir): - _simulate_pre_1_14_0(store) + with store.connect() as db: # simulate pre-1.14.0 + db.executescript('DROP TABLE configs') test_mark_config_as_used(store, tmpdir) @@ -245,7 +316,7 @@ def test_mark_config_as_used_readonly(tmpdir): assert store.readonly # should be skipped due to readonly store.mark_config_used(str(cfg)) - assert store.select_all_configs() == [] + assert _select_all_configs(store) == [] def test_clone_with_recursive_submodules(store, tmp_path): diff --git a/tests/yaml_rewrite_test.py b/tests/yaml_rewrite_test.py new file mode 100644 index 00000000..d0f6841c --- /dev/null +++ b/tests/yaml_rewrite_test.py @@ -0,0 +1,47 @@ +from __future__ import annotations + +import pytest + +from pre_commit.yaml import yaml_compose +from pre_commit.yaml_rewrite import MappingKey +from pre_commit.yaml_rewrite import MappingValue +from pre_commit.yaml_rewrite import match +from pre_commit.yaml_rewrite import SequenceItem + + +def test_match_produces_scalar_values_only(): + src = '''\ +- name: foo +- name: [not, 
foo] # not a scalar: should be skipped! +- name: bar +''' + matcher = (SequenceItem(), MappingValue('name')) + ret = [n.value for n in match(yaml_compose(src), matcher)] + assert ret == ['foo', 'bar'] + + +@pytest.mark.parametrize('cls', (MappingKey, MappingValue)) +def test_mapping_not_a_map(cls): + m = cls('s') + assert list(m.match(yaml_compose('[foo]'))) == [] + + +def test_sequence_item_not_a_sequence(): + assert list(SequenceItem().match(yaml_compose('s: val'))) == [] + + +def test_mapping_key(): + m = MappingKey('s') + ret = [n.value for n in m.match(yaml_compose('s: val\nt: val2'))] + assert ret == ['s'] + + +def test_mapping_value(): + m = MappingValue('s') + ret = [n.value for n in m.match(yaml_compose('s: val\nt: val2'))] + assert ret == ['val'] + + +def test_sequence_item(): + ret = [n.value for n in SequenceItem().match(yaml_compose('[a, b, c]'))] + assert ret == ['a', 'b', 'c']