mirror of https://github.com/pre-commit/pre-commit.git
synced 2026-02-17 08:14:42 +04:00

Compare commits
89 commits

| Author | SHA1 | Date |
|---|---|---|
| | 8416413a0e | |
| | 37a879e65e | |
| | 8a0630ca1a | |
| | fcbc745744 | |
| | 51592eecec | |
| | 67e8faf80b | |
| | c251e6b6d0 | |
| | 98ccafa3ce | |
| | 48953556d0 | |
| | 2cedd58e69 | |
| | 465192d7de | |
| | fd42f96874 | |
| | 8ea2b790d8 | |
| | 1af6c8fa95 | |
| | 3358a3b540 | |
| | bdf68790b7 | |
| | e436690f14 | |
| | 8d34f95308 | |
| | 9c7ea88ab9 | |
| | 844dacc168 | |
| | 6a1d543e52 | |
| | 66278a9a0b | |
| | 1b32c50bc7 | |
| | 063229aee7 | |
| | 49e28eea48 | |
| | d5c273a2ba | |
| | 17cf886473 | |
| | cb63a5cb9a | |
| | f80801d75a | |
| | 9143fc3545 | |
| | 725acc969a | |
| | 3815e2e6d8 | |
| | aa2961c122 | |
| | 46297f7cd6 | |
| | 95eec75004 | |
| | 5e4b3546f3 | |
| | 8bbfcf1f82 | |
| | 65175f3cf3 | |
| | fc33a62f3c | |
| | 2db924eb98 | |
| | ddfcf4034b | |
| | 1b424ccfa2 | |
| | 221637b0cb | |
| | 7ad23528d0 | |
| | f415f6c4d7 | |
| | 99fa9ba5ef | |
| | ad0d4cd427 | |
| | 924680e974 | |
| | 2930ea0fcd | |
| | b96127c485 | |
| | 954cc3b3b3 | |
| | e671830402 | |
| | c78f248c60 | |
| | e70b313c80 | |
| | 87a681f866 | |
| | b74a22d96c | |
| | cc899de192 | |
| | 2a0bcea757 | |
| | f1cc7a445f | |
| | 72a3b71f0e | |
| | c8925a457a | |
| | a5fe6c500c | |
| | 6f1f433a9c | |
| | c6817210b1 | |
| | 4fd4537bc6 | |
| | a1d7bed86f | |
| | d1d5b3d564 | |
| | 9c228a0bd8 | |
| | d4f0c6e8a7 | |
| | 5f0c773e74 | |
| | 43b426a501 | |
| | 8a4af027a1 | |
| | 466f6c4a39 | |
| | d2b61d0ef2 | |
| | 43592c2a29 | |
| | 6d47b8d52b | |
| | aa48766b88 | |
| | bf6f11dc6c | |
| | 3e8d0f5e1c | |
| | ff7256cedf | |
| | b7eb412c79 | |
| | 7b88c63ae6 | |
| | 94b97e28f7 | |
| | 2f93b80484 | |
| | 4f90a1e88a | |
| | aba1ce04e7 | |
| | e2210c97e2 | |
| | 804c853d8f | |
| | edd0002e43 | |

53 changed files with 990 additions and 270 deletions
6 .github/workflows/languages.yaml vendored

@@ -21,7 +21,7 @@ jobs:
          fetch-depth: 0
      - uses: actions/setup-python@v4
        with:
          python-version: 3.9
          python-version: '3.10'
      - name: install deps
        run: python -mpip install -e . -r requirements-dev.txt
      - name: vars

@@ -36,10 +36,10 @@ jobs:
      matrix:
        include: ${{ fromJSON(needs.vars.outputs.languages) }}
    steps:
      - uses: asottile/workflows/.github/actions/fast-checkout@v1.4.0
      - uses: asottile/workflows/.github/actions/fast-checkout@v1.8.1
      - uses: actions/setup-python@v4
        with:
          python-version: 3.9
          python-version: '3.10'

      - run: echo "$CONDA\Scripts" >> "$GITHUB_PATH"
        shell: bash

8 .github/workflows/main.yml vendored

@@ -12,12 +12,12 @@ concurrency:

jobs:
  main-windows:
    uses: asottile/workflows/.github/workflows/tox.yml@v1.6.0
    uses: asottile/workflows/.github/workflows/tox.yml@v1.8.1
    with:
      env: '["py39"]'
      env: '["py310"]'
      os: windows-latest
  main-linux:
    uses: asottile/workflows/.github/workflows/tox.yml@v1.6.0
    uses: asottile/workflows/.github/workflows/tox.yml@v1.8.1
    with:
      env: '["py39", "py310", "py311", "py312"]'
      env: '["py310", "py311", "py312", "py313"]'
      os: ubuntu-latest

@@ -1,6 +1,6 @@
repos:
-   repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v5.0.0
    rev: v6.0.0
    hooks:
    -   id: trailing-whitespace
    -   id: end-of-file-fixer

@@ -10,34 +10,34 @@ repos:
    -   id: name-tests-test
    -   id: requirements-txt-fixer
-   repo: https://github.com/asottile/setup-cfg-fmt
    rev: v2.7.0
    rev: v3.2.0
    hooks:
    -   id: setup-cfg-fmt
-   repo: https://github.com/asottile/reorder-python-imports
    rev: v3.14.0
    rev: v3.16.0
    hooks:
    -   id: reorder-python-imports
        exclude: ^(pre_commit/resources/|testing/resources/python3_hooks_repo/)
        args: [--py39-plus, --add-import, 'from __future__ import annotations']
        exclude: ^pre_commit/resources/
        args: [--py310-plus, --add-import, 'from __future__ import annotations']
-   repo: https://github.com/asottile/add-trailing-comma
    rev: v3.1.0
    rev: v4.0.0
    hooks:
    -   id: add-trailing-comma
-   repo: https://github.com/asottile/pyupgrade
    rev: v3.19.1
    rev: v3.21.2
    hooks:
    -   id: pyupgrade
        args: [--py39-plus]
        args: [--py310-plus]
-   repo: https://github.com/hhatto/autopep8
    rev: v2.3.1
    rev: v2.3.2
    hooks:
    -   id: autopep8
-   repo: https://github.com/PyCQA/flake8
    rev: 7.1.1
    rev: 7.3.0
    hooks:
    -   id: flake8
-   repo: https://github.com/pre-commit/mirrors-mypy
    rev: v1.14.1
    rev: v1.19.1
    hooks:
    -   id: mypy
        additional_dependencies: [types-pyyaml]

68 CHANGELOG.md

@@ -1,3 +1,67 @@
4.5.1 - 2025-12-16
==================

### Fixes
- Fix `language: python` with `repo: local` without `additional_dependencies`.
    - #3597 PR by @asottile.

4.5.0 - 2025-11-22
==================

### Features
- Add `pre-commit hazmat`.
    - #3585 PR by @asottile.

4.4.0 - 2025-11-08
==================

### Features
- Add `--fail-fast` option to `pre-commit run`.
    - #3528 PR by @JulianMaurin.
- Upgrade `ruby-build` / `rbenv`.
    - #3566 PR by @asottile.
    - #3565 issue by @MRigal.
- Add `language: unsupported` / `language: unsupported_script` as aliases
  for `language: system` / `language: script` (which will eventually be
  deprecated).
    - #3577 PR by @asottile.
- Add support docker-in-docker detection for cgroups v2.
    - #3535 PR by @br-rhrbacek.
    - #3360 issue by @JasonAlt.

### Fixes
- Handle when docker gives `SecurityOptions: null`.
    - #3537 PR by @asottile.
    - #3514 issue by @jenstroeger.
- Fix error context for invalid `stages` in `.pre-commit-config.yaml`.
    - #3576 PR by @asottile.

4.3.0 - 2025-08-09
==================

### Features
- `language: docker` / `language: docker_image`: detect rootless docker.
    - #3446 PR by @matthewhughes934.
    - #1243 issue by @dkolepp.
- `language: julia`: avoid `startup.jl` when executing hooks.
    - #3496 PR by @ericphanson.
- `language: dart`: support latest dart versions which require a higher sdk
  lower bound.
    - #3507 PR by @bc-lee.

4.2.0 - 2025-03-18
==================

### Features
- For `language: python` first attempt a versioned python executable for
  the default language version before consulting a potentially unversioned
  `sys.executable`.
    - #3430 PR by @asottile.

### Fixes
- Handle error during conflict detection when a file is named "HEAD"
    - #3425 PR by @tusharsadhwani.

4.1.0 - 2025-01-20
==================

@@ -45,7 +109,7 @@
    - #3315 PR by @asottile.
    - #2732 issue by @asottile.

### Migrating
### Updating
- `language: python_venv` has been removed -- use `language: python` instead.
    - #3320 PR by @asottile.
    - #2734 issue by @asottile.

@@ -133,7 +197,7 @@
- Use `time.monotonic()` for more accurate hook timing.
    - #3024 PR by @adamchainz.

### Migrating
### Updating
- Require npm 6.x+ for `language: node` hooks.
    - #2996 PR by @RoelAdriaans.
    - #1983 issue by @henryiii.

@@ -19,9 +19,9 @@ from pre_commit.languages import python
from pre_commit.languages import r
from pre_commit.languages import ruby
from pre_commit.languages import rust
from pre_commit.languages import script
from pre_commit.languages import swift
from pre_commit.languages import system
from pre_commit.languages import unsupported
from pre_commit.languages import unsupported_script


languages: dict[str, Language] = {

@@ -43,8 +43,8 @@ languages: dict[str, Language] = {
    'r': r,
    'ruby': ruby,
    'rust': rust,
    'script': script,
    'swift': swift,
    'system': system,
    'unsupported': unsupported,
    'unsupported_script': unsupported_script,
}
language_names = sorted(languages)

@@ -6,6 +6,7 @@ import os.path
import re
import shlex
import sys
from collections.abc import Callable
from collections.abc import Sequence
from typing import Any
from typing import NamedTuple

@@ -116,11 +117,12 @@ class StagesMigrationNoDefault(NamedTuple):
        if self.key not in dct:
            return

        val = dct[self.key]
        cfgv.check_array(cfgv.check_any)(val)
        with cfgv.validate_context(f'At key: {self.key}'):
            val = dct[self.key]
            cfgv.check_array(cfgv.check_any)(val)

        val = [transform_stage(v) for v in val]
        cfgv.check_array(cfgv.check_one_of(STAGES))(val)
            val = [transform_stage(v) for v in val]
            cfgv.check_array(cfgv.check_one_of(STAGES))(val)

    def apply_default(self, dct: dict[str, Any]) -> None:
        if self.key not in dct:

@@ -189,6 +191,42 @@ class DeprecatedDefaultStagesWarning(NamedTuple):
        raise NotImplementedError


def _translate_language(name: str) -> str:
    return {
        'system': 'unsupported',
        'script': 'unsupported_script',
    }.get(name, name)


class LanguageMigration(NamedTuple):  # remove
    key: str
    check_fn: Callable[[object], None]

    def check(self, dct: dict[str, Any]) -> None:
        if self.key not in dct:
            return

        with cfgv.validate_context(f'At key: {self.key}'):
            self.check_fn(_translate_language(dct[self.key]))

    def apply_default(self, dct: dict[str, Any]) -> None:
        if self.key not in dct:
            return

        dct[self.key] = _translate_language(dct[self.key])

    def remove_default(self, dct: dict[str, Any]) -> None:
        raise NotImplementedError


class LanguageMigrationRequired(LanguageMigration):  # replace with Required
    def check(self, dct: dict[str, Any]) -> None:
        if self.key not in dct:
            raise cfgv.ValidationError(f'Missing required key: {self.key}')

        super().check(dct)


MANIFEST_HOOK_DICT = cfgv.Map(
    'Hook', 'id',

@@ -202,7 +240,7 @@ MANIFEST_HOOK_DICT = cfgv.Map(
    cfgv.Required('id', cfgv.check_string),
    cfgv.Required('name', cfgv.check_string),
    cfgv.Required('entry', cfgv.check_string),
    cfgv.Required('language', cfgv.check_one_of(language_names)),
    LanguageMigrationRequired('language', cfgv.check_one_of(language_names)),
    cfgv.Optional('alias', cfgv.check_string, ''),

    cfgv.Optional('files', check_string_regex, ''),

@@ -232,10 +270,19 @@ class InvalidManifestError(FatalError):
    pass


def _load_manifest_forward_compat(contents: str) -> object:
    obj = yaml_load(contents)
    if isinstance(obj, dict):
        check_min_version('5')
        raise AssertionError('unreachable')
    else:
        return obj


load_manifest = functools.partial(
    cfgv.load_from_filename,
    schema=MANIFEST_SCHEMA,
    load_strategy=yaml_load,
    load_strategy=_load_manifest_forward_compat,
    exc_tp=InvalidManifestError,
)

@@ -367,8 +414,10 @@ META_HOOK_DICT = cfgv.Map(
    'Hook', 'id',
    cfgv.Required('id', cfgv.check_string),
    cfgv.Required('id', cfgv.check_one_of(tuple(k for k, _ in _meta))),
    # language must be system
    cfgv.Optional('language', cfgv.check_one_of({'system'}), 'system'),
    # language must be `unsupported`
    cfgv.Optional(
        'language', cfgv.check_one_of({'unsupported'}), 'unsupported',
    ),
    # entry cannot be overridden
    NotAllowed('entry', cfgv.check_any),
    *(

@@ -401,8 +450,10 @@ CONFIG_HOOK_DICT = cfgv.Map(
        for item in MANIFEST_HOOK_DICT.items
        if item.key != 'id'
        if item.key != 'stages'
        if item.key != 'language'  # remove
    ),
    StagesMigrationNoDefault('stages', []),
    LanguageMigration('language', cfgv.check_one_of(language_names)),  # remove
    *_COMMON_HOOK_WARNINGS,
)
LOCAL_HOOK_DICT = cfgv.Map(

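(Illustrative aside, not part of the diff: a minimal standalone sketch of the alias translation that `_translate_language` / `LanguageMigration` above perform during config validation; the mapping is copied from the diff, the cfgv plumbing is omitted.)

# Sketch only -- mirrors _translate_language from the diff above.
def translate_language(name: str) -> str:
    return {
        'system': 'unsupported',
        'script': 'unsupported_script',
    }.get(name, name)


hook = {'id': 'example', 'language': 'system'}
hook['language'] = translate_language(hook['language'])
assert hook['language'] == 'unsupported'
assert translate_language('python') == 'python'  # other names pass through unchanged
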
@@ -12,6 +12,7 @@ from pre_commit.clientlib import load_manifest
from pre_commit.clientlib import LOCAL
from pre_commit.clientlib import META
from pre_commit.store import Store
from pre_commit.util import rmtree


def _mark_used_repos(

@@ -26,7 +27,8 @@ def _mark_used_repos(
        for hook in repo['hooks']:
            deps = hook.get('additional_dependencies')
            unused_repos.discard((
                store.db_repo_name(repo['repo'], deps), C.LOCAL_REPO_VERSION,
                store.db_repo_name(repo['repo'], deps),
                C.LOCAL_REPO_VERSION,
            ))
    else:
        key = (repo['repo'], repo['rev'])

@@ -56,34 +58,41 @@ def _mark_used_repos(
        ))


def _gc_repos(store: Store) -> int:
    configs = store.select_all_configs()
    repos = store.select_all_repos()
def _gc(store: Store) -> int:
    with store.exclusive_lock(), store.connect() as db:
        store._create_configs_table(db)

    # delete config paths which do not exist
    dead_configs = [p for p in configs if not os.path.exists(p)]
    live_configs = [p for p in configs if os.path.exists(p)]
        repos = db.execute('SELECT repo, ref, path FROM repos').fetchall()
        all_repos = {(repo, ref): path for repo, ref, path in repos}
        unused_repos = set(all_repos)

    all_repos = {(repo, ref): path for repo, ref, path in repos}
    unused_repos = set(all_repos)
    for config_path in live_configs:
        try:
            config = load_config(config_path)
        except InvalidConfigError:
            dead_configs.append(config_path)
            continue
        else:
            for repo in config['repos']:
                _mark_used_repos(store, all_repos, unused_repos, repo)
        configs_rows = db.execute('SELECT path FROM configs').fetchall()
        configs = [path for path, in configs_rows]

    store.delete_configs(dead_configs)
    for db_repo_name, ref in unused_repos:
        store.delete_repo(db_repo_name, ref, all_repos[(db_repo_name, ref)])
    return len(unused_repos)
        dead_configs = []
        for config_path in configs:
            try:
                config = load_config(config_path)
            except InvalidConfigError:
                dead_configs.append(config_path)
                continue
            else:
                for repo in config['repos']:
                    _mark_used_repos(store, all_repos, unused_repos, repo)

        paths = [(path,) for path in dead_configs]
        db.executemany('DELETE FROM configs WHERE path = ?', paths)

        db.executemany(
            'DELETE FROM repos WHERE repo = ? and ref = ?',
            sorted(unused_repos),
        )
        for k in unused_repos:
            rmtree(all_repos[k])

        return len(unused_repos)


def gc(store: Store) -> int:
    with store.exclusive_lock():
        repos_removed = _gc_repos(store)
    output.write_line(f'{repos_removed} repo(s) removed.')
    output.write_line(f'{_gc(store)} repo(s) removed.')
    return 0

95 pre_commit/commands/hazmat.py Normal file

@@ -0,0 +1,95 @@
from __future__ import annotations

import argparse
import subprocess
from collections.abc import Sequence

from pre_commit.parse_shebang import normalize_cmd


def add_parsers(parser: argparse.ArgumentParser) -> None:
    subparsers = parser.add_subparsers(dest='tool')

    cd_parser = subparsers.add_parser(
        'cd', help='cd to a subdir and run the command',
    )
    cd_parser.add_argument('subdir')
    cd_parser.add_argument('cmd', nargs=argparse.REMAINDER)

    ignore_exit_code_parser = subparsers.add_parser(
        'ignore-exit-code', help='run the command but ignore the exit code',
    )
    ignore_exit_code_parser.add_argument('cmd', nargs=argparse.REMAINDER)

    n1_parser = subparsers.add_parser(
        'n1', help='run the command once per filename',
    )
    n1_parser.add_argument('cmd', nargs=argparse.REMAINDER)


def _cmd_filenames(cmd: tuple[str, ...]) -> tuple[
        tuple[str, ...],
        tuple[str, ...],
]:
    for idx, val in enumerate(reversed(cmd)):
        if val == '--':
            split = len(cmd) - idx
            break
    else:
        raise SystemExit('hazmat entry must end with `--`')

    return cmd[:split - 1], cmd[split:]


def cd(subdir: str, cmd: tuple[str, ...]) -> int:
    cmd, filenames = _cmd_filenames(cmd)

    prefix = f'{subdir}/'
    new_filenames = []
    for filename in filenames:
        if not filename.startswith(prefix):
            raise SystemExit(f'unexpected file without {prefix=}: {filename}')
        else:
            new_filenames.append(filename.removeprefix(prefix))

    cmd = normalize_cmd(cmd)
    return subprocess.call((*cmd, *new_filenames), cwd=subdir)


def ignore_exit_code(cmd: tuple[str, ...]) -> int:
    cmd = normalize_cmd(cmd)
    subprocess.call(cmd)
    return 0


def n1(cmd: tuple[str, ...]) -> int:
    cmd, filenames = _cmd_filenames(cmd)
    cmd = normalize_cmd(cmd)
    ret = 0
    for filename in filenames:
        ret |= subprocess.call((*cmd, filename))
    return ret


def impl(args: argparse.Namespace) -> int:
    args.cmd = tuple(args.cmd)
    if args.tool == 'cd':
        return cd(args.subdir, args.cmd)
    elif args.tool == 'ignore-exit-code':
        return ignore_exit_code(args.cmd)
    elif args.tool == 'n1':
        return n1(args.cmd)
    else:
        raise NotImplementedError(f'unexpected tool: {args.tool}')


def main(argv: Sequence[str] | None = None) -> int:
    parser = argparse.ArgumentParser()
    add_parsers(parser)
    args = parser.parse_args(argv)

    return impl(args)


if __name__ == '__main__':
    raise SystemExit(main())

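(Illustrative aside, not part of the new file: a standalone sketch of the `--` convention the hazmat helpers rely on -- everything after the last `--` in the hook `entry` is treated as filenames, everything before it as the command. It mirrors `_cmd_filenames` above; the helper name here is hypothetical.)

# Sketch only -- same split-at-last-`--` logic as _cmd_filenames above.
def split_cmd_filenames(cmd: tuple[str, ...]) -> tuple[tuple[str, ...], tuple[str, ...]]:
    for idx, val in enumerate(reversed(cmd)):
        if val == '--':
            split = len(cmd) - idx
            return cmd[:split - 1], cmd[split:]
    raise SystemExit('hazmat entry must end with `--`')


cmd, filenames = split_cmd_filenames(('flake8', '--max-line-length=100', '--', 'a.py', 'b.py'))
assert cmd == ('flake8', '--max-line-length=100')
assert filenames == ('a.py', 'b.py')
# `n1` would then run the command once per filename (OR-ing the return codes),
# while `cd` strips a `subdir/` prefix from each filename and runs in that subdir.
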
@@ -106,6 +106,7 @@ def _ns(
        hook=None,
        verbose=False,
        show_diff_on_failure=False,
        fail_fast=False,
    )

@@ -3,7 +3,7 @@ from __future__ import annotations
import functools
import itertools
import textwrap
from typing import Callable
from collections.abc import Callable

import cfgv
import yaml

@@ -298,7 +298,8 @@ def _run_hooks(
            verbose=args.verbose, use_color=args.color,
        )
        retval |= current_retval
        if current_retval and (config['fail_fast'] or hook.fail_fast):
        fail_fast = (config['fail_fast'] or hook.fail_fast or args.fail_fast)
        if current_retval and fail_fast:
            break
    if retval and args.show_diff_on_failure and prior_diff:
        if args.all_files:

@@ -3,8 +3,8 @@ from __future__ import annotations
import contextlib
import errno
import sys
from collections.abc import Callable
from collections.abc import Generator
from typing import Callable


if sys.platform == 'win32':  # pragma: no cover (windows)

@@ -126,7 +126,7 @@ def get_conflicted_files() -> set[str]:
    merge_diff_filenames = zsplit(
        cmd_output(
            'git', 'diff', '--name-only', '--no-ext-diff', '-z',
            '-m', tree_hash, 'HEAD', 'MERGE_HEAD',
            '-m', tree_hash, 'HEAD', 'MERGE_HEAD', '--',
        )[1],
    )
    return set(merge_conflict_filenames) | set(merge_diff_filenames)

@@ -219,7 +219,7 @@ def check_for_cygwin_mismatch() -> None:

    if is_cygwin_python ^ is_cygwin_git:
        exe_type = {True: '(cygwin)', False: '(windows)'}
        logger.warn(
        logger.warning(
            f'pre-commit has detected a mix of cygwin python / git\n'
            f'This combination is not supported, it is likely you will '
            f'receive an error later in the program.\n'

@@ -5,6 +5,7 @@ import os
import random
import re
import shlex
import sys
from collections.abc import Generator
from collections.abc import Sequence
from typing import Any

@@ -171,7 +172,10 @@ def run_xargs(


def hook_cmd(entry: str, args: Sequence[str]) -> tuple[str, ...]:
    return (*shlex.split(entry), *args)
    cmd = shlex.split(entry)
    if cmd[:2] == ['pre-commit', 'hazmat']:
        cmd = [sys.executable, '-m', 'pre_commit.commands.hazmat', *cmd[2:]]
    return (*cmd, *args)


def basic_run_hook(

@@ -1,8 +1,11 @@
from __future__ import annotations

import contextlib
import functools
import hashlib
import json
import os
import re
from collections.abc import Sequence

from pre_commit import lang_base

@@ -16,31 +19,33 @@ get_default_version = lang_base.basic_get_default_version
health_check = lang_base.basic_health_check
in_env = lang_base.no_env  # no special environment for docker


def _is_in_docker() -> bool:
    try:
        with open('/proc/1/cgroup', 'rb') as f:
            return b'docker' in f.read()
    except FileNotFoundError:
        return False
_HOSTNAME_MOUNT_RE = re.compile(
    rb"""
    /containers
    (?:/overlay-containers)?
    /([a-z0-9]{64})
    (?:/userdata)?
    /hostname
    """,
    re.VERBOSE,
)


def _get_container_id() -> str:
    # It's assumed that we already check /proc/1/cgroup in _is_in_docker. The
    # cpuset cgroup controller existed since cgroups were introduced so this
    # way of getting the container ID is pretty reliable.
    with open('/proc/1/cgroup', 'rb') as f:
        for line in f.readlines():
            if line.split(b':')[1] == b'cpuset':
                return os.path.basename(line.split(b':')[2]).strip().decode()
    raise RuntimeError('Failed to find the container ID in /proc/1/cgroup.')
def _get_container_id() -> str | None:
    with contextlib.suppress(FileNotFoundError):
        with open('/proc/1/mountinfo', 'rb') as f:
            for line in f:
                m = _HOSTNAME_MOUNT_RE.search(line)
                if m:
                    return m[1].decode()

    return None


def _get_docker_path(path: str) -> str:
    if not _is_in_docker():
        return path

    container_id = _get_container_id()
    if container_id is None:
        return path

    try:
        _, out, _ = cmd_output_b('docker', 'inspect', container_id)

@@ -101,7 +106,32 @@ def install_environment(
    os.mkdir(directory)


@functools.lru_cache(maxsize=1)
def _is_rootless() -> bool:  # pragma: win32 no cover
    retcode, out, _ = cmd_output_b(
        'docker', 'system', 'info', '--format', '{{ json . }}',
    )
    if retcode != 0:
        return False

    info = json.loads(out)
    try:
        return (
            # docker:
            # https://docs.docker.com/reference/api/engine/version/v1.48/#tag/System/operation/SystemInfo
            'name=rootless' in (info.get('SecurityOptions') or ()) or
            # podman:
            # https://docs.podman.io/en/latest/_static/api.html?version=v5.4#tag/system/operation/SystemInfoLibpod
            info['host']['security']['rootless']
        )
    except KeyError:
        return False


def get_docker_user() -> tuple[str, ...]:  # pragma: win32 no cover
    if _is_rootless():
        return ()

    try:
        return ('-u', f'{os.getuid()}:{os.getgid()}')
    except AttributeError:

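(Illustrative aside, not part of the diff: a standalone sketch of what the new `_HOSTNAME_MOUNT_RE` lookup does -- it scans `/proc/1/mountinfo` for the bind-mounted `/etc/hostname` entry and extracts the 64-hex container id from its source path. The regex is copied from the diff; the sample line is an abbreviated form of the docker fixture used in the tests further down.)

# Sketch only -- regex taken verbatim from the diff above.
import re

_HOSTNAME_MOUNT_RE = re.compile(
    rb"""
    /containers
    (?:/overlay-containers)?
    /([a-z0-9]{64})
    (?:/userdata)?
    /hostname
    """,
    re.VERBOSE,
)

# abbreviated /proc/1/mountinfo line (shape matches the docker fixtures below)
line = (
    b'781 759 8:5 /var/lib/docker/containers/'
    b'c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7'
    b'/hostname /etc/hostname rw,relatime - ext4 /dev/sda5 rw\n'
)
m = _HOSTNAME_MOUNT_RE.search(line)
assert m is not None
assert m[1].decode() == (
    'c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7'
)
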
@@ -90,8 +90,7 @@ def _infer_go_version(version: str) -> str:
    if version != C.DEFAULT:
        return version
    resp = urllib.request.urlopen('https://go.dev/dl/?mode=json')
    # TODO: 3.9+ .removeprefix('go')
    return json.load(resp)[0]['version'][2:]
    return json.load(resp)[0]['version'].removeprefix('go')


def _get_url(version: str) -> str:

@@ -37,7 +37,7 @@ def run_hook(

    cmd = lang_base.hook_cmd(entry, args)
    script = cmd[0] if is_local else prefix.path(cmd[0])
    cmd = ('julia', script, *cmd[1:])
    cmd = ('julia', '--startup-file=no', script, *cmd[1:])
    return lang_base.run_xargs(
        cmd,
        file_args,

@@ -127,6 +127,7 @@ def install_environment(
    end
    """
    cmd_output_b(
        'julia', '-e', julia_code, '--', envdir, *additional_dependencies,
        'julia', '--startup-file=no', '-e', julia_code, '--', envdir,
        *additional_dependencies,
        cwd=prefix.prefix_dir,
    )

@@ -75,6 +75,13 @@ def _find_by_py_launcher(
    return None


def _impl_exe_name() -> str:
    if sys.implementation.name == 'cpython':  # pragma: cpython cover
        return 'python'
    else:  # pragma: cpython no cover
        return sys.implementation.name  # pypy mostly


def _find_by_sys_executable() -> str | None:
    def _norm(path: str) -> str | None:
        _, exe = os.path.split(path.lower())

@@ -100,18 +107,25 @@ def _find_by_sys_executable() -> str | None:

@functools.lru_cache(maxsize=1)
def get_default_version() -> str:  # pragma: no cover (platform dependent)
    # First attempt from `sys.executable` (or the realpath)
    exe = _find_by_sys_executable()
    if exe:
        return exe
    v_major = f'{sys.version_info[0]}'
    v_minor = f'{sys.version_info[0]}.{sys.version_info[1]}'

    # Next try the `pythonX.X` executable
    exe = f'python{sys.version_info[0]}.{sys.version_info[1]}'
    if find_executable(exe):
        return exe
    # attempt the likely implementation exe
    for potential in (v_minor, v_major):
        exe = f'{_impl_exe_name()}{potential}'
        if find_executable(exe):
            return exe

    if _find_by_py_launcher(exe):
        return exe
    # next try `sys.executable` (or the realpath)
    maybe_exe = _find_by_sys_executable()
    if maybe_exe:
        return maybe_exe

    # maybe on windows we can find it via py launcher?
    if sys.platform == 'win32':  # pragma: win32 cover
        exe = f'python{v_minor}'
        if _find_by_py_launcher(exe):
            return exe

    # We tried!
    return C.DEFAULT

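(Illustrative aside, not part of the diff: a small sketch of the candidate interpreter names the reworked `get_default_version` now tries first -- versioned implementation executables before falling back to `sys.executable`. The `find_executable` lookup and the windows `py` launcher path are elided.)

# Sketch only -- candidate names as derived by _impl_exe_name/get_default_version above.
import sys

impl = 'python' if sys.implementation.name == 'cpython' else sys.implementation.name
v_major = f'{sys.version_info[0]}'
v_minor = f'{sys.version_info[0]}.{sys.version_info[1]}'

candidates = [f'{impl}{v_minor}', f'{impl}{v_major}']
# e.g. on CPython 3.12: ['python3.12', 'python3']; only if neither is on PATH does
# the search fall back to sys.executable, then the py launcher on windows, and
# finally the 'default' sentinel.
print(candidates)
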
@@ -10,6 +10,7 @@ import pre_commit.constants as C
from pre_commit import clientlib
from pre_commit import git
from pre_commit.color import add_color_option
from pre_commit.commands import hazmat
from pre_commit.commands.autoupdate import autoupdate
from pre_commit.commands.clean import clean
from pre_commit.commands.gc import gc

@@ -41,7 +42,7 @@ os.environ.pop('__PYVENV_LAUNCHER__', None)
os.environ.pop('PYTHONEXECUTABLE', None)

COMMANDS_NO_GIT = {
    'clean', 'gc', 'init-templatedir', 'sample-config',
    'clean', 'gc', 'hazmat', 'init-templatedir', 'sample-config',
    'validate-config', 'validate-manifest',
}

@@ -62,10 +63,10 @@ def _add_hook_type_option(parser: argparse.ArgumentParser) -> None:

def _add_run_options(parser: argparse.ArgumentParser) -> None:
    parser.add_argument('hook', nargs='?', help='A single hook-id to run')
    parser.add_argument('--verbose', '-v', action='store_true', default=False)
    parser.add_argument('--verbose', '-v', action='store_true')
    mutex_group = parser.add_mutually_exclusive_group(required=False)
    mutex_group.add_argument(
        '--all-files', '-a', action='store_true', default=False,
        '--all-files', '-a', action='store_true',
        help='Run on all the files in the repo.',
    )
    mutex_group.add_argument(

@@ -76,6 +77,10 @@ def _add_run_options(parser: argparse.ArgumentParser) -> None:
        '--show-diff-on-failure', action='store_true',
        help='When hooks fail, run `git diff` directly afterward.',
    )
    parser.add_argument(
        '--fail-fast', action='store_true',
        help='Stop after the first failing hook.',
    )
    parser.add_argument(
        '--hook-stage',
        choices=clientlib.STAGES,

@@ -241,6 +246,11 @@ def main(argv: Sequence[str] | None = None) -> int:

    _add_cmd('gc', help='Clean unused cached repos.')

    hazmat_parser = _add_cmd(
        'hazmat', help='Composable tools for rare use in hook `entry`.',
    )
    hazmat.add_parsers(hazmat_parser)

    init_templatedir_parser = _add_cmd(
        'init-templatedir',
        help=(

@@ -275,7 +285,7 @@ def main(argv: Sequence[str] | None = None) -> int:
    )
    _add_hook_type_option(install_parser)
    install_parser.add_argument(
        '--allow-missing-config', action='store_true', default=False,
        '--allow-missing-config', action='store_true',
        help=(
            'Whether to allow a missing `pre-commit` configuration file '
            'or exit with a failure code.'

@@ -385,6 +395,8 @@ def main(argv: Sequence[str] | None = None) -> int:
        return clean(store)
    elif args.command == 'gc':
        return gc(store)
    elif args.command == 'hazmat':
        return hazmat.impl(args)
    elif args.command == 'hook-impl':
        return hook_impl(
            store,

@@ -1,4 +1,4 @@
name: pre_commit_empty_pubspec
environment:
  sdk: '>=2.10.0'
  sdk: '>=2.12.0'
executables: {}

@@ -1,4 +1,4 @@
from setuptools import setup


setup(name='pre-commit-placeholder-package', version='0.0.0')
setup(name='pre-commit-placeholder-package', version='0.0.0', py_modules=[])

Binary file not shown.

@@ -5,9 +5,9 @@ import logging
import os.path
import sqlite3
import tempfile
from collections.abc import Callable
from collections.abc import Generator
from collections.abc import Sequence
from typing import Callable

import pre_commit.constants as C
from pre_commit import clientlib

@@ -17,7 +17,6 @@ from pre_commit.util import CalledProcessError
from pre_commit.util import clean_path_on_failure
from pre_commit.util import cmd_output_b
from pre_commit.util import resource_text
from pre_commit.util import rmtree


logger = logging.getLogger('pre_commit')

@@ -96,7 +95,7 @@ class Store:
                ' PRIMARY KEY (repo, ref)'
                ');',
            )
            self._create_config_table(db)
            self._create_configs_table(db)

        # Atomic file move
        os.replace(tmpfile, self.db_path)

@@ -215,7 +214,7 @@ class Store:
            'local', C.LOCAL_REPO_VERSION, deps, _make_local_repo,
        )

    def _create_config_table(self, db: sqlite3.Connection) -> None:
    def _create_configs_table(self, db: sqlite3.Connection) -> None:
        db.executescript(
            'CREATE TABLE IF NOT EXISTS configs ('
            ' path TEXT NOT NULL,'

@@ -232,28 +231,5 @@ class Store:
            return
        with self.connect() as db:
            # TODO: eventually remove this and only create in _create
            self._create_config_table(db)
            self._create_configs_table(db)
            db.execute('INSERT OR IGNORE INTO configs VALUES (?)', (path,))

    def select_all_configs(self) -> list[str]:
        with self.connect() as db:
            self._create_config_table(db)
            rows = db.execute('SELECT path FROM configs').fetchall()
            return [path for path, in rows]

    def delete_configs(self, configs: list[str]) -> None:
        with self.connect() as db:
            rows = [(path,) for path in configs]
            db.executemany('DELETE FROM configs WHERE path = ?', rows)

    def select_all_repos(self) -> list[tuple[str, str, str]]:
        with self.connect() as db:
            return db.execute('SELECT repo, ref, path from repos').fetchall()

    def delete_repo(self, db_repo_name: str, ref: str, path: str) -> None:
        with self.connect() as db:
            db.execute(
                'DELETE FROM repos WHERE repo = ? and ref = ?',
                (db_repo_name, ref),
            )
        rmtree(path)

@@ -8,10 +8,10 @@ import shutil
import stat
import subprocess
import sys
from collections.abc import Callable
from collections.abc import Generator
from types import TracebackType
from typing import Any
from typing import Callable

from pre_commit import parse_shebang

@@ -7,12 +7,12 @@ import multiprocessing
import os
import subprocess
import sys
from collections.abc import Callable
from collections.abc import Generator
from collections.abc import Iterable
from collections.abc import MutableMapping
from collections.abc import Sequence
from typing import Any
from typing import Callable
from typing import TypeVar

from pre_commit import parse_shebang

@@ -1,6 +1,6 @@
[metadata]
name = pre_commit
version = 4.1.0
version = 4.5.1
description = A framework for managing and maintaining multi-language pre-commit hooks.
long_description = file: README.md
long_description_content_type = text/markdown

@@ -10,7 +10,6 @@ author_email = asottile@umich.edu
license = MIT
license_files = LICENSE
classifiers =
    License :: OSI Approved :: MIT License
    Programming Language :: Python :: 3
    Programming Language :: Python :: 3 :: Only
    Programming Language :: Python :: Implementation :: CPython

@@ -24,7 +23,7 @@ install_requires =
    nodeenv>=0.11.1
    pyyaml>=5.1
    virtualenv>=20.10.0
python_requires = >=3.9
python_requires = >=3.10

[options.packages.find]
exclude =

@@ -53,6 +52,7 @@ check_untyped_defs = true
disallow_any_generics = true
disallow_incomplete_defs = true
disallow_untyped_defs = true
enable_error_code = deprecated
warn_redundant_casts = true
warn_unused_ignores = true

@@ -1,7 +1,7 @@
#!/usr/bin/env bash
set -euo pipefail

VERSION=2.13.4
VERSION=2.19.6

if [ "$OSTYPE" = msys ]; then
    URL="https://storage.googleapis.com/dart-archive/channels/stable/release/${VERSION}/sdk/dartsdk-windows-x64-release.zip"

@@ -16,8 +16,8 @@ from collections.abc import Sequence


REPOS = (
    ('rbenv', 'https://github.com/rbenv/rbenv', '38e1fbb'),
    ('ruby-build', 'https://github.com/rbenv/ruby-build', 'ed384c8'),
    ('rbenv', 'https://github.com/rbenv/rbenv', '10e96bfc'),
    ('ruby-build', 'https://github.com/rbenv/ruby-build', '447468b1'),
    (
        'ruby-download',
        'https://github.com/garnieretienne/rvm-download',

@@ -1,6 +0,0 @@
-   id: python3-hook
    name: Python 3 Hook
    entry: python3-hook
    language: python
    language_version: python3
    files: \.py$

@@ -1,8 +0,0 @@
import sys


def main():
    print(sys.version_info[0])
    print(repr(sys.argv[1:]))
    print('Hello World')
    return 0

@@ -1,8 +0,0 @@
from setuptools import setup

setup(
    name='python3_hook',
    version='0.0.0',
    py_modules=['py3_hook'],
    entry_points={'console_scripts': ['python3-hook = py3_hook:main']},
)

@@ -1,5 +0,0 @@
-   id: system-hook-with-spaces
    name: System hook with spaces
    entry: bash -c 'echo "Hello World"'
    language: system
    files: \.sh$

@@ -40,6 +40,7 @@ def run_opts(
        color=False,
        verbose=False,
        hook=None,
        fail_fast=False,
        remote_branch='',
        local_branch='',
        from_ref='',

@@ -65,6 +66,7 @@ def run_opts(
        color=color,
        verbose=verbose,
        hook=hook,
        fail_fast=fail_fast,
        remote_branch=remote_branch,
        local_branch=local_branch,
        from_ref=from_ref,

@@ -107,9 +107,6 @@ def main() -> int:
    shebang = '/usr/bin/env python3'
    zipapp.create_archive(tmpdir, filename, interpreter=shebang)

    with open(f'{filename}.sha256sum', 'w') as f:
        subprocess.check_call(('sha256sum', filename), stdout=f)

    return 0

@@ -12,6 +12,8 @@ from pre_commit.clientlib import CONFIG_HOOK_DICT
from pre_commit.clientlib import CONFIG_REPO_DICT
from pre_commit.clientlib import CONFIG_SCHEMA
from pre_commit.clientlib import DEFAULT_LANGUAGE_VERSION
from pre_commit.clientlib import InvalidManifestError
from pre_commit.clientlib import load_manifest
from pre_commit.clientlib import MANIFEST_HOOK_DICT
from pre_commit.clientlib import MANIFEST_SCHEMA
from pre_commit.clientlib import META_HOOK_DICT

@@ -309,6 +311,27 @@ def test_validate_optional_sensible_regex_at_top_level(caplog, regex, warning):
    assert caplog.record_tuples == [('pre_commit', logging.WARNING, warning)]


def test_invalid_stages_error():
    cfg = {'repos': [sample_local_config()]}
    cfg['repos'][0]['hooks'][0]['stages'] = ['invalid']

    with pytest.raises(cfgv.ValidationError) as excinfo:
        cfgv.validate(cfg, CONFIG_SCHEMA)

    assert str(excinfo.value) == (
        '\n'
        '==> At Config()\n'
        '==> At key: repos\n'
        "==> At Repository(repo='local')\n"
        '==> At key: hooks\n'
        "==> At Hook(id='do_not_commit')\n"
        # this line was missing due to the custom validator
        '==> At key: stages\n'
        '==> At index 0\n'
        "=====> Expected one of commit-msg, manual, post-checkout, post-commit, post-merge, post-rewrite, pre-commit, pre-merge-commit, pre-push, pre-rebase, prepare-commit-msg but got: 'invalid'"  # noqa: E501
    )


def test_warning_for_deprecated_stages(caplog):
    config_obj = sample_local_config()
    config_obj['hooks'][0]['stages'] = ['commit', 'push']

@@ -359,6 +382,26 @@ def test_no_warning_for_non_deprecated_default_stages(caplog):
    assert caplog.record_tuples == []


def test_unsupported_language_migration():
    cfg = {'repos': [sample_local_config(), sample_local_config()]}
    cfg['repos'][0]['hooks'][0]['language'] = 'system'
    cfg['repos'][1]['hooks'][0]['language'] = 'script'

    cfgv.validate(cfg, CONFIG_SCHEMA)
    ret = cfgv.apply_defaults(cfg, CONFIG_SCHEMA)

    assert ret['repos'][0]['hooks'][0]['language'] == 'unsupported'
    assert ret['repos'][1]['hooks'][0]['language'] == 'unsupported_script'


def test_unsupported_language_migration_language_required():
    cfg = {'repos': [sample_local_config()]}
    del cfg['repos'][0]['hooks'][0]['language']

    with pytest.raises(cfgv.ValidationError):
        cfgv.validate(cfg, CONFIG_SCHEMA)


@pytest.mark.parametrize(
    'manifest_obj',
    (

@@ -547,3 +590,18 @@ def test_config_hook_stages_defaulting():
        'id': 'fake-hook',
        'stages': ['commit-msg', 'pre-push', 'pre-commit', 'pre-merge-commit'],
    }


def test_manifest_v5_forward_compat(tmp_path):
    manifest = tmp_path.joinpath('.pre-commit-hooks.yaml')
    manifest.write_text('hooks: {}')

    with pytest.raises(InvalidManifestError) as excinfo:
        load_manifest(manifest)
    assert str(excinfo.value) == (
        f'\n'
        f'==> File {manifest}\n'
        f'=====> \n'
        f'=====> pre-commit version 5 is required but version {C.VERSION} '
        f'is installed. Perhaps run `pip install --upgrade pre-commit`.'
    )

@@ -19,11 +19,13 @@ from testing.util import git_commit


def _repo_count(store):
    return len(store.select_all_repos())
    with store.connect() as db:
        return db.execute('SELECT COUNT(1) FROM repos').fetchone()[0]


def _config_count(store):
    return len(store.select_all_configs())
    with store.connect() as db:
        return db.execute('SELECT COUNT(1) FROM configs').fetchone()[0]


def _remove_config_assert_cleared(store, cap_out):

@@ -153,7 +155,8 @@ def test_invalid_manifest_gcd(tempdir_factory, store, in_git_dir, cap_out):
    install_hooks(C.CONFIG_FILE, store)

    # we'll "break" the manifest to simulate an old version clone
    (_, _, path), = store.select_all_repos()
    with store.connect() as db:
        path, = db.execute('SELECT path FROM repos').fetchone()
    os.remove(os.path.join(path, C.MANIFEST_FILE))

    assert _config_count(store) == 1

@@ -162,3 +165,11 @@ def test_invalid_manifest_gcd(tempdir_factory, store, in_git_dir, cap_out):
    assert _config_count(store) == 1
    assert _repo_count(store) == 0
    assert cap_out.get().splitlines()[-1] == '1 repo(s) removed.'


def test_gc_pre_1_14_roll_forward(store, cap_out):
    with store.connect() as db:  # simulate pre-1.14.0
        db.executescript('DROP TABLE configs')

    assert not gc(store)
    assert cap_out.get() == '0 repo(s) removed.\n'

99 tests/commands/hazmat_test.py Normal file

@@ -0,0 +1,99 @@
from __future__ import annotations

import sys

import pytest

from pre_commit.commands.hazmat import _cmd_filenames
from pre_commit.commands.hazmat import main
from testing.util import cwd


def test_cmd_filenames_no_dash_dash():
    with pytest.raises(SystemExit) as excinfo:
        _cmd_filenames(('no', 'dashdash', 'here'))
    msg, = excinfo.value.args
    assert msg == 'hazmat entry must end with `--`'


def test_cmd_filenames_no_filenames():
    cmd, filenames = _cmd_filenames(('hello', 'world', '--'))
    assert cmd == ('hello', 'world')
    assert filenames == ()


def test_cmd_filenames_some_filenames():
    cmd, filenames = _cmd_filenames(('hello', 'world', '--', 'f1', 'f2'))
    assert cmd == ('hello', 'world')
    assert filenames == ('f1', 'f2')


def test_cmd_filenames_multiple_dashdash():
    cmd, filenames = _cmd_filenames(('hello', '--', 'arg', '--', 'f1', 'f2'))
    assert cmd == ('hello', '--', 'arg')
    assert filenames == ('f1', 'f2')


def test_cd_unexpected_filename():
    with pytest.raises(SystemExit) as excinfo:
        main(('cd', 'subdir', 'cmd', '--', 'subdir/1', 'not-subdir/2'))
    msg, = excinfo.value.args
    assert msg == "unexpected file without prefix='subdir/': not-subdir/2"


def _norm(out):
    return out.replace('\r\n', '\n')


def test_cd(tmp_path, capfd):
    subdir = tmp_path.joinpath('subdir')
    subdir.mkdir()
    subdir.joinpath('a').write_text('a')
    subdir.joinpath('b').write_text('b')

    with cwd(tmp_path):
        ret = main((
            'cd', 'subdir',
            sys.executable, '-c',
            'import os; print(os.getcwd());'
            'import sys; [print(open(f).read()) for f in sys.argv[1:]]',
            '--',
            'subdir/a', 'subdir/b',
        ))

    assert ret == 0
    out, err = capfd.readouterr()
    assert _norm(out) == f'{subdir}\na\nb\n'
    assert err == ''


def test_ignore_exit_code(capfd):
    ret = main((
        'ignore-exit-code', sys.executable, '-c', 'raise SystemExit("bye")',
    ))
    assert ret == 0
    out, err = capfd.readouterr()
    assert out == ''
    assert _norm(err) == 'bye\n'


def test_n1(capfd):
    ret = main((
        'n1', sys.executable, '-c', 'import sys; print(sys.argv[1:])',
        '--',
        'foo', 'bar', 'baz',
    ))
    assert ret == 0
    out, err = capfd.readouterr()
    assert _norm(out) == "['foo']\n['bar']\n['baz']\n"
    assert err == ''


def test_n1_some_error_code():
    ret = main((
        'n1', sys.executable, '-c',
        'import sys; raise SystemExit(sys.argv[1] == "error")',
        '--',
        'ok', 'error', 'ok',
    ))
    assert ret == 1

@@ -1104,6 +1104,19 @@ def test_fail_fast_not_prev_failures(cap_out, store, repo_with_failing_hook):
    assert printed.count(b'run me!') == 1


def test_fail_fast_run_arg(cap_out, store, repo_with_failing_hook):
    with modify_config() as config:
        # More than one hook to demonstrate early exit
        config['repos'][0]['hooks'] *= 2
    stage_a_file()

    ret, printed = _do_run(
        cap_out, store, repo_with_failing_hook, run_opts(fail_fast=True),
    )
    # it should have only run one hook due to the CLI flag
    assert printed.count(b'Failing hook') == 1


def test_classifier_removes_dne():
    classifier = Classifier(('this_file_does_not_exist',))
    assert classifier.filenames == []

@@ -141,6 +141,15 @@ def test_get_conflicted_files_unstaged_files(in_merge_conflict):
    assert ret == {'conflict_file'}


def test_get_conflicted_files_with_file_named_head(in_merge_conflict):
    resolve_conflict()
    open('HEAD', 'w').close()
    cmd_output('git', 'add', 'HEAD')

    ret = set(git.get_conflicted_files())
    assert ret == {'conflict_file', 'HEAD'}


MERGE_MSG = b"Merge branch 'foo' into bar\n\nConflicts:\n\tconflict_file\n"
OTHER_MERGE_MSG = MERGE_MSG + b'\tother_conflict_file\n'

@@ -164,3 +164,15 @@ def test_basic_run_hook(tmp_path):
    assert ret == 0
    out = out.replace(b'\r\n', b'\n')
    assert out == b'hi hello file file file\n'


def test_hook_cmd():
    assert lang_base.hook_cmd('echo hi', ()) == ('echo', 'hi')


def test_hook_cmd_hazmat():
    ret = lang_base.hook_cmd('pre-commit hazmat cd a echo -- b', ())
    assert ret == (
        sys.executable, '-m', 'pre_commit.commands.hazmat',
        'cd', 'a', 'echo', '--', 'b',
    )

@@ -10,7 +10,7 @@ from testing.language_helpers import run_language
def test_dart(tmp_path):
    pubspec_yaml = '''\
environment:
  sdk: '>=2.10.0 <3.0.0'
  sdk: '>=2.12.0 <4.0.0'

name: hello_world_dart

@@ -14,40 +14,173 @@ from pre_commit.util import CalledProcessError
from testing.language_helpers import run_language
from testing.util import xfailif_windows

DOCKER_CGROUP_EXAMPLE = b'''\
12:hugetlb:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7
|
||||
11:blkio:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7
|
||||
10:freezer:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7
|
||||
9:cpu,cpuacct:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7
|
||||
8:pids:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7
|
||||
7:rdma:/
|
||||
6:net_cls,net_prio:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7
|
||||
5:cpuset:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7
|
||||
4:devices:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7
|
||||
3:memory:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7
|
||||
2:perf_event:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7
|
||||
1:name=systemd:/docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7
|
||||
0::/system.slice/containerd.service
|
||||
DOCKER_CGROUPS_V1_MOUNTINFO_EXAMPLE = b'''\
|
||||
759 717 0:52 / / rw,relatime master:300 - overlay overlay rw,lowerdir=/var/lib/docker/overlay2/l/PCPE5P5IVGM7CFCPJR353N3ONK:/var/lib/docker/overlay2/l/EQFSDHFAJ333VEMEJD4ZTRIZCB,upperdir=/var/lib/docker/overlay2/0d9f6bf186030d796505b87d6daa92297355e47641e283d3c09d83a7f221e462/diff,workdir=/var/lib/docker/overlay2/0d9f6bf186030d796505b87d6daa92297355e47641e283d3c09d83a7f221e462/work
|
||||
760 759 0:58 / /proc rw,nosuid,nodev,noexec,relatime - proc proc rw
|
||||
761 759 0:59 / /dev rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,inode64
|
||||
762 761 0:60 / /dev/pts rw,nosuid,noexec,relatime - devpts devpts rw,gid=5,mode=620,ptmxmode=666
|
||||
763 759 0:61 / /sys ro,nosuid,nodev,noexec,relatime - sysfs sysfs ro
|
||||
764 763 0:62 / /sys/fs/cgroup rw,nosuid,nodev,noexec,relatime - tmpfs tmpfs rw,mode=755,inode64
|
||||
765 764 0:29 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/systemd ro,nosuid,nodev,noexec,relatime master:11 - cgroup cgroup rw,xattr,name=systemd
|
||||
766 764 0:32 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/rdma ro,nosuid,nodev,noexec,relatime master:15 - cgroup cgroup rw,rdma
|
||||
767 764 0:33 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/cpu,cpuacct ro,nosuid,nodev,noexec,relatime master:16 - cgroup cgroup rw,cpu,cpuacct
|
||||
768 764 0:34 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/cpuset ro,nosuid,nodev,noexec,relatime master:17 - cgroup cgroup rw,cpuset
|
||||
769 764 0:35 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/pids ro,nosuid,nodev,noexec,relatime master:18 - cgroup cgroup rw,pids
|
||||
770 764 0:36 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/memory ro,nosuid,nodev,noexec,relatime master:19 - cgroup cgroup rw,memory
|
||||
771 764 0:37 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/perf_event ro,nosuid,nodev,noexec,relatime master:20 - cgroup cgroup rw,perf_event
|
||||
772 764 0:38 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/net_cls,net_prio ro,nosuid,nodev,noexec,relatime master:21 - cgroup cgroup rw,net_cls,net_prio
|
||||
773 764 0:39 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/blkio ro,nosuid,nodev,noexec,relatime master:22 - cgroup cgroup rw,blkio
|
||||
774 764 0:40 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/misc ro,nosuid,nodev,noexec,relatime master:23 - cgroup cgroup rw,misc
|
||||
775 764 0:41 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/hugetlb ro,nosuid,nodev,noexec,relatime master:24 - cgroup cgroup rw,hugetlb
|
||||
776 764 0:42 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/devices ro,nosuid,nodev,noexec,relatime master:25 - cgroup cgroup rw,devices
|
||||
777 764 0:43 /docker/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7 /sys/fs/cgroup/freezer ro,nosuid,nodev,noexec,relatime master:26 - cgroup cgroup rw,freezer
|
||||
778 761 0:57 / /dev/mqueue rw,nosuid,nodev,noexec,relatime - mqueue mqueue rw
|
||||
779 761 0:63 / /dev/shm rw,nosuid,nodev,noexec,relatime - tmpfs shm rw,size=65536k,inode64
|
||||
780 759 8:5 /var/lib/docker/containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/resolv.conf /etc/resolv.conf rw,relatime - ext4 /dev/sda5 rw,errors=remount-ro
|
||||
781 759 8:5 /var/lib/docker/containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/hostname /etc/hostname rw,relatime - ext4 /dev/sda5 rw,errors=remount-ro
|
||||
782 759 8:5 /var/lib/docker/containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/hosts /etc/hosts rw,relatime - ext4 /dev/sda5 rw,errors=remount-ro
|
||||
718 761 0:60 /0 /dev/console rw,nosuid,noexec,relatime - devpts devpts rw,gid=5,mode=620,ptmxmode=666
|
||||
719 760 0:58 /bus /proc/bus ro,nosuid,nodev,noexec,relatime - proc proc rw
|
||||
720 760 0:58 /fs /proc/fs ro,nosuid,nodev,noexec,relatime - proc proc rw
|
||||
721 760 0:58 /irq /proc/irq ro,nosuid,nodev,noexec,relatime - proc proc rw
|
||||
722 760 0:58 /sys /proc/sys ro,nosuid,nodev,noexec,relatime - proc proc rw
|
||||
723 760 0:58 /sysrq-trigger /proc/sysrq-trigger ro,nosuid,nodev,noexec,relatime - proc proc rw
|
||||
724 760 0:64 / /proc/asound ro,relatime - tmpfs tmpfs ro,inode64
|
||||
725 760 0:65 / /proc/acpi ro,relatime - tmpfs tmpfs ro,inode64
|
||||
726 760 0:59 /null /proc/kcore rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,inode64
|
||||
727 760 0:59 /null /proc/keys rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,inode64
|
||||
728 760 0:59 /null /proc/timer_list rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,inode64
|
||||
729 760 0:66 / /proc/scsi ro,relatime - tmpfs tmpfs ro,inode64
|
||||
730 763 0:67 / /sys/firmware ro,relatime - tmpfs tmpfs ro,inode64
|
||||
731 763 0:68 / /sys/devices/virtual/powercap ro,relatime - tmpfs tmpfs ro,inode64
|
||||
''' # noqa: E501
|
||||
|
||||
DOCKER_CGROUPS_V2_MOUNTINFO_EXAMPLE = b'''\
|
||||
721 386 0:45 / / rw,relatime master:218 - overlay overlay rw,lowerdir=/var/lib/docker/overlay2/l/QHZ7OM7P4AQD3XLG274ZPWAJCV:/var/lib/docker/overlay2/l/5RFG6SZWVGOG2NKEYXJDQCQYX5,upperdir=/var/lib/docker/overlay2/e4ad859fc5d4791932b9b976052f01fb0063e01de3cef916e40ae2121f6a166e/diff,workdir=/var/lib/docker/overlay2/e4ad859fc5d4791932b9b976052f01fb0063e01de3cef916e40ae2121f6a166e/work,nouserxattr
|
||||
722 721 0:48 / /proc rw,nosuid,nodev,noexec,relatime - proc proc rw
|
||||
723 721 0:50 / /dev rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,inode64
|
||||
724 723 0:51 / /dev/pts rw,nosuid,noexec,relatime - devpts devpts rw,gid=5,mode=620,ptmxmode=666
|
||||
725 721 0:52 / /sys ro,nosuid,nodev,noexec,relatime - sysfs sysfs ro
|
||||
726 725 0:26 / /sys/fs/cgroup ro,nosuid,nodev,noexec,relatime - cgroup2 cgroup rw,nsdelegate,memory_recursiveprot
|
||||
727 723 0:47 / /dev/mqueue rw,nosuid,nodev,noexec,relatime - mqueue mqueue rw
|
||||
728 723 0:53 / /dev/shm rw,nosuid,nodev,noexec,relatime - tmpfs shm rw,size=65536k,inode64
|
||||
729 721 8:3 /var/lib/docker/containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/resolv.conf /etc/resolv.conf rw,relatime - ext4 /dev/sda3 rw,errors=remount-ro
|
||||
730 721 8:3 /var/lib/docker/containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/hostname /etc/hostname rw,relatime - ext4 /dev/sda3 rw,errors=remount-ro
|
||||
731 721 8:3 /var/lib/docker/containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/hosts /etc/hosts rw,relatime - ext4 /dev/sda3 rw,errors=remount-ro
|
||||
387 723 0:51 /0 /dev/console rw,nosuid,noexec,relatime - devpts devpts rw,gid=5,mode=620,ptmxmode=666
|
||||
388 722 0:48 /bus /proc/bus ro,nosuid,nodev,noexec,relatime - proc proc rw
|
||||
389 722 0:48 /fs /proc/fs ro,nosuid,nodev,noexec,relatime - proc proc rw
|
||||
525 722 0:48 /irq /proc/irq ro,nosuid,nodev,noexec,relatime - proc proc rw
|
||||
526 722 0:48 /sys /proc/sys ro,nosuid,nodev,noexec,relatime - proc proc rw
|
||||
571 722 0:48 /sysrq-trigger /proc/sysrq-trigger ro,nosuid,nodev,noexec,relatime - proc proc rw
|
||||
572 722 0:57 / /proc/asound ro,relatime - tmpfs tmpfs ro,inode64
|
||||
575 722 0:58 / /proc/acpi ro,relatime - tmpfs tmpfs ro,inode64
|
||||
576 722 0:50 /null /proc/kcore rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,inode64
|
||||
577 722 0:50 /null /proc/keys rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,inode64
|
||||
578 722 0:50 /null /proc/timer_list rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,inode64
|
||||
579 722 0:59 / /proc/scsi ro,relatime - tmpfs tmpfs ro,inode64
|
||||
580 725 0:60 / /sys/firmware ro,relatime - tmpfs tmpfs ro,inode64
|
||||
''' # noqa: E501
|
||||
|
||||
PODMAN_CGROUPS_V1_MOUNTINFO_EXAMPLE = b'''\
|
||||
1200 915 0:57 / / rw,relatime - overlay overlay rw,lowerdir=/home/asottile/.local/share/containers/storage/overlay/l/ZWAU3VY3ZHABQJRBUAFPBX7R5D,upperdir=/home/asottile/.local/share/containers/storage/overlay/72504ef163fda63838930450553b7306412ccad139a007626732b3dc43af5200/diff,workdir=/home/asottile/.local/share/containers/storage/overlay/72504ef163fda63838930450553b7306412ccad139a007626732b3dc43af5200/work,volatile,userxattr
|
||||
1204 1200 0:62 / /proc rw,nosuid,nodev,noexec,relatime - proc proc rw
|
||||
1205 1200 0:63 / /dev rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,uid=1000,gid=1000,inode64
|
||||
1206 1200 0:64 / /sys ro,nosuid,nodev,noexec,relatime - sysfs sysfs rw
|
||||
1207 1205 0:65 / /dev/pts rw,nosuid,noexec,relatime - devpts devpts rw,gid=100004,mode=620,ptmxmode=666
|
||||
1208 1205 0:61 / /dev/mqueue rw,nosuid,nodev,noexec,relatime - mqueue mqueue rw
|
||||
1209 1200 0:53 /containers/overlay-containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/userdata/.containerenv /run/.containerenv rw,nosuid,nodev,relatime - tmpfs tmpfs rw,size=814036k,mode=700,uid=1000,gid=1000,inode64
|
||||
1210 1200 0:53 /containers/overlay-containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/userdata/resolv.conf /etc/resolv.conf rw,nosuid,nodev,relatime - tmpfs tmpfs rw,size=814036k,mode=700,uid=1000,gid=1000,inode64
|
||||
1211 1200 0:53 /containers/overlay-containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/userdata/hosts /etc/hosts rw,nosuid,nodev,relatime - tmpfs tmpfs rw,size=814036k,mode=700,uid=1000,gid=1000,inode64
|
||||
1212 1205 0:56 / /dev/shm rw,relatime - tmpfs shm rw,size=64000k,uid=1000,gid=1000,inode64
|
||||
1213 1200 0:53 /containers/overlay-containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/userdata/hostname /etc/hostname rw,nosuid,nodev,relatime - tmpfs tmpfs rw,size=814036k,mode=700,uid=1000,gid=1000,inode64
|
||||
1214 1206 0:66 / /sys/fs/cgroup rw,nosuid,nodev,noexec,relatime - tmpfs cgroup rw,size=1024k,uid=1000,gid=1000,inode64
|
||||
1215 1214 0:43 / /sys/fs/cgroup/freezer ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,freezer
|
||||
1216 1214 0:42 /user.slice /sys/fs/cgroup/devices ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,devices
|
||||
1217 1214 0:41 / /sys/fs/cgroup/hugetlb ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,hugetlb
|
||||
1218 1214 0:40 / /sys/fs/cgroup/misc ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,misc
|
||||
1219 1214 0:39 / /sys/fs/cgroup/blkio ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,blkio
|
||||
1220 1214 0:38 / /sys/fs/cgroup/net_cls,net_prio ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,net_cls,net_prio
|
||||
1221 1214 0:37 / /sys/fs/cgroup/perf_event ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,perf_event
|
||||
1222 1214 0:36 /user.slice/user-1000.slice/user@1000.service /sys/fs/cgroup/memory ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,memory
|
||||
1223 1214 0:35 /user.slice/user-1000.slice/user@1000.service /sys/fs/cgroup/pids ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,pids
|
||||
1224 1214 0:34 / /sys/fs/cgroup/cpuset ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,cpuset
|
||||
1225 1214 0:33 / /sys/fs/cgroup/cpu,cpuacct ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,cpu,cpuacct
|
||||
1226 1214 0:32 / /sys/fs/cgroup/rdma ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,rdma
|
||||
1227 1214 0:29 /user.slice/user-1000.slice/user@1000.service/apps.slice/apps-org.gnome.Terminal.slice/vte-spawn-0c50448e-b395-4d76-8b92-379f16e5066f.scope /sys/fs/cgroup/systemd ro,nosuid,nodev,noexec,relatime - cgroup cgroup rw,xattr,name=systemd
|
||||
1228 1205 0:5 /null /dev/null rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64
|
||||
1229 1205 0:5 /zero /dev/zero rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64
|
||||
1230 1205 0:5 /full /dev/full rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64
|
||||
1231 1205 0:5 /tty /dev/tty rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64
|
||||
1232 1205 0:5 /random /dev/random rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64
|
||||
1233 1205 0:5 /urandom /dev/urandom rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64
|
||||
1234 1204 0:67 / /proc/acpi ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64
|
||||
1235 1204 0:5 /null /proc/kcore rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64
|
||||
1236 1204 0:5 /null /proc/keys rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64
|
||||
1237 1204 0:5 /null /proc/timer_list rw,nosuid,noexec,relatime - devtmpfs udev rw,size=4031656k,nr_inodes=1007914,mode=755,inode64
|
||||
1238 1204 0:68 / /proc/scsi ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64
|
||||
1239 1206 0:69 / /sys/firmware ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64
|
||||
1240 1206 0:70 / /sys/dev/block ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64
|
||||
1241 1204 0:62 /asound /proc/asound ro,relatime - proc proc rw
|
||||
1242 1204 0:62 /bus /proc/bus ro,relatime - proc proc rw
|
||||
1243 1204 0:62 /fs /proc/fs ro,relatime - proc proc rw
|
||||
1244 1204 0:62 /irq /proc/irq ro,relatime - proc proc rw
|
||||
1245 1204 0:62 /sys /proc/sys ro,relatime - proc proc rw
|
||||
1256 1204 0:62 /sysrq-trigger /proc/sysrq-trigger ro,relatime - proc proc rw
|
||||
916 1205 0:65 /0 /dev/console rw,relatime - devpts devpts rw,gid=100004,mode=620,ptmxmode=666
|
||||
''' # noqa: E501
|
||||
|
||||
PODMAN_CGROUPS_V2_MOUNTINFO_EXAMPLE = b'''\
|
||||
685 690 0:63 /containers/overlay-containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/userdata/resolv.conf /etc/resolv.conf rw,nosuid,nodev,relatime - tmpfs tmpfs rw,size=1637624k,nr_inodes=409406,mode=700,uid=1000,gid=1000,inode64
|
||||
686 690 0:63 /containers/overlay-containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/userdata/hosts /etc/hosts rw,nosuid,nodev,relatime - tmpfs tmpfs rw,size=1637624k,nr_inodes=409406,mode=700,uid=1000,gid=1000,inode64
|
||||
687 692 0:50 / /dev/shm rw,nosuid,nodev,noexec,relatime - tmpfs shm rw,size=64000k,uid=1000,gid=1000,inode64
|
||||
688 690 0:63 /containers/overlay-containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/userdata/.containerenv /run/.containerenv rw,nosuid,nodev,relatime - tmpfs tmpfs rw,size=1637624k,nr_inodes=409406,mode=700,uid=1000,gid=1000,inode64
|
||||
689 690 0:63 /containers/overlay-containers/c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7/userdata/hostname /etc/hostname rw,nosuid,nodev,relatime - tmpfs tmpfs rw,size=1637624k,nr_inodes=409406,mode=700,uid=1000,gid=1000,inode64
|
||||
690 546 0:55 / / rw,relatime - overlay overlay rw,lowerdir=/home/asottile/.local/share/containers/storage/overlay/l/NPOHYOD3PI3YW6TQSGBOVOUSK6,upperdir=/home/asottile/.local/share/containers/storage/overlay/565c206fb79f876ffd5f069b8bd7a97fb5e47d5d07396b0c395a4ed6725d4a8e/diff,workdir=/home/asottile/.local/share/containers/storage/overlay/565c206fb79f876ffd5f069b8bd7a97fb5e47d5d07396b0c395a4ed6725d4a8e/work,redirect_dir=nofollow,uuid=on,volatile,userxattr
|
||||
691 690 0:59 / /proc rw,nosuid,nodev,noexec,relatime - proc proc rw
|
||||
692 690 0:61 / /dev rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755,uid=1000,gid=1000,inode64
|
||||
693 690 0:62 / /sys ro,nosuid,nodev,noexec,relatime - sysfs sysfs rw
|
||||
694 692 0:66 / /dev/pts rw,nosuid,noexec,relatime - devpts devpts rw,gid=100004,mode=620,ptmxmode=666
|
||||
695 692 0:58 / /dev/mqueue rw,nosuid,nodev,noexec,relatime - mqueue mqueue rw
|
||||
696 693 0:28 / /sys/fs/cgroup ro,nosuid,nodev,noexec,relatime - cgroup2 cgroup2 rw,nsdelegate,memory_recursiveprot
|
||||
698 692 0:6 /null /dev/null rw,nosuid,noexec,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64
|
||||
699 692 0:6 /zero /dev/zero rw,nosuid,noexec,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64
|
||||
700 692 0:6 /full /dev/full rw,nosuid,noexec,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64
|
||||
701 692 0:6 /tty /dev/tty rw,nosuid,noexec,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64
|
||||
702 692 0:6 /random /dev/random rw,nosuid,noexec,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64
|
||||
703 692 0:6 /urandom /dev/urandom rw,nosuid,noexec,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64
|
||||
704 691 0:67 / /proc/acpi ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64
|
||||
705 691 0:6 /null /proc/kcore ro,nosuid,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64
|
||||
706 691 0:6 /null /proc/keys ro,nosuid,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64
|
||||
707 691 0:6 /null /proc/latency_stats ro,nosuid,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64
|
||||
708 691 0:6 /null /proc/timer_list ro,nosuid,relatime - devtmpfs udev rw,size=8147812k,nr_inodes=2036953,mode=755,inode64
|
||||
709 691 0:68 / /proc/scsi ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64
|
||||
710 693 0:69 / /sys/firmware ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64
|
||||
711 693 0:70 / /sys/dev/block ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64
|
||||
712 693 0:71 / /sys/devices/virtual/powercap ro,relatime - tmpfs tmpfs rw,size=0k,uid=1000,gid=1000,inode64
|
||||
713 691 0:59 /asound /proc/asound ro,nosuid,nodev,noexec,relatime - proc proc rw
|
||||
714 691 0:59 /bus /proc/bus ro,nosuid,nodev,noexec,relatime - proc proc rw
|
||||
715 691 0:59 /fs /proc/fs ro,nosuid,nodev,noexec,relatime - proc proc rw
|
||||
716 691 0:59 /irq /proc/irq ro,nosuid,nodev,noexec,relatime - proc proc rw
|
||||
717 691 0:59 /sys /proc/sys ro,nosuid,nodev,noexec,relatime - proc proc rw
|
||||
718 691 0:59 /sysrq-trigger /proc/sysrq-trigger ro,nosuid,nodev,noexec,relatime - proc proc rw
|
||||
547 692 0:66 /0 /dev/console rw,relatime - devpts devpts rw,gid=100004,mode=620,ptmxmode=666
|
||||
''' # noqa: E501

# The ID should match the above cgroup example.
CONTAINER_ID = 'c33988ec7651ebc867cb24755eaf637a6734088bc7eef59d5799293a9e5450f7' # noqa: E501

NON_DOCKER_CGROUP_EXAMPLE = b'''\
12:perf_event:/
11:hugetlb:/
10:devices:/
9:blkio:/
8:rdma:/
7:cpuset:/
6:cpu,cpuacct:/
5:freezer:/
4:memory:/
3:pids:/
2:net_cls,net_prio:/
1:name=systemd:/init.scope
0::/init.scope
'''
NON_DOCKER_MOUNTINFO_EXAMPLE = b'''\
21 27 0:19 / /sys rw,nosuid,nodev,noexec,relatime shared:7 - sysfs sysfs rw
22 27 0:20 / /proc rw,nosuid,nodev,noexec,relatime shared:14 - proc proc rw
23 27 0:5 / /dev rw,nosuid,relatime shared:2 - devtmpfs udev rw,size=10219484k,nr_inodes=2554871,mode=755,inode64
24 23 0:21 / /dev/pts rw,nosuid,noexec,relatime shared:3 - devpts devpts rw,gid=5,mode=620,ptmxmode=000
25 27 0:22 / /run rw,nosuid,nodev,noexec,relatime shared:5 - tmpfs tmpfs rw,size=2047768k,mode=755,inode64
27 1 8:2 / / rw,relatime shared:1 - ext4 /dev/sda2 rw,errors=remount-ro
28 21 0:6 / /sys/kernel/security rw,nosuid,nodev,noexec,relatime shared:8 - securityfs securityfs rw
29 23 0:24 / /dev/shm rw,nosuid,nodev shared:4 - tmpfs tmpfs rw,inode64
30 25 0:25 / /run/lock rw,nosuid,nodev,noexec,relatime shared:6 - tmpfs tmpfs rw,size=5120k,inode64
''' # noqa: E501
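
All of the container fixtures above bind-mount per-container files (hostname, hosts, resolv.conf, .containerenv) from a path that embeds the 64-character container id, which is what makes a mountinfo-based lookup work for Docker and Podman on both cgroups v1 and v2, while the non-container example has no such mount. A rough sketch of that kind of extraction (illustrative only, with a hypothetical helper name, not pre-commit's actual `_get_container_id`):

import re

_HEX_ID = re.compile(r'[0-9a-f]{64}')
_WELL_KNOWN = {'/etc/hostname', '/etc/hosts', '/etc/resolv.conf', '/run/.containerenv'}


def container_id_from_mountinfo(mountinfo: bytes) -> str | None:
    # each line: mount-id parent-id major:minor root mount-point options ...
    for line in mountinfo.decode().splitlines():
        fields = line.split(' ')
        if len(fields) >= 5 and fields[4] in _WELL_KNOWN:
            match = _HEX_ID.search(fields[3])
            if match:
                return match.group()
    return None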


def test_docker_fallback_user():
@@ -62,9 +195,46 @@ def test_docker_fallback_user():
    assert docker.get_docker_user() == ()


def test_in_docker_no_file():
@pytest.fixture(autouse=True)
def _avoid_cache():
    with mock.patch.object(
            docker,
            '_is_rootless',
            docker._is_rootless.__wrapped__,
    ):
        yield
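
The autouse fixture above swaps in `_is_rootless.__wrapped__`, the undecorated function, presumably so a cached rootless answer from one test cannot leak into the next. A small standalone illustration of the `__wrapped__` attribute that caching decorators expose (not pre-commit code):

import functools


@functools.cache
def answer() -> int:
    return 42


answer()                              # cached from now on
assert answer.__wrapped__() == 42     # calls the original, bypassing the cache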


@pytest.mark.parametrize(
    'info_ret',
    (
        (0, b'{"SecurityOptions": ["name=rootless","name=cgroupns"]}', b''),
        (0, b'{"host": {"security": {"rootless": true}}}', b''),
    ),
)
def test_docker_user_rootless(info_ret):
    with mock.patch.object(docker, 'cmd_output_b', return_value=info_ret):
        assert docker.get_docker_user() == ()


@pytest.mark.parametrize(
    'info_ret',
    (
        (0, b'{"SecurityOptions": ["name=cgroupns"]}', b''),
        (0, b'{"host": {"security": {"rootless": false}}}', b''),
        (0, b'{"response_from_some_other_container_engine": true}', b''),
        (0, b'{"SecurityOptions": null}', b''),
        (1, b'', b''),
    ),
)
def test_docker_user_non_rootless(info_ret):
    with mock.patch.object(docker, 'cmd_output_b', return_value=info_ret):
        assert docker.get_docker_user() != ()
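
The parametrized payloads mimic the JSON returned by `docker info` and `podman info` in both shapes the code has to recognize. A minimal sketch of the decision those cases pin down (hypothetical helper, not the real `_is_rootless`):

import json


def looks_rootless(retcode: int, out: bytes) -> bool:
    if retcode != 0:        # the info command failed: assume not rootless
        return False
    info = json.loads(out)
    if 'name=rootless' in (info.get('SecurityOptions') or ()):
        return True         # docker reports rootless via SecurityOptions
    # podman nests the flag under host.security.rootless
    return bool(info.get('host', {}).get('security', {}).get('rootless'))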


def test_container_id_no_file():
    with mock.patch.object(builtins, 'open', side_effect=FileNotFoundError):
        assert docker._is_in_docker() is False
        assert docker._get_container_id() is None


def _mock_open(data):
@@ -76,38 +246,33 @@ def _mock_open(data):
    )


def test_in_docker_docker_in_file():
    with _mock_open(DOCKER_CGROUP_EXAMPLE):
        assert docker._is_in_docker() is True


def test_in_docker_docker_not_in_file():
    with _mock_open(NON_DOCKER_CGROUP_EXAMPLE):
        assert docker._is_in_docker() is False
def test_container_id_not_in_file():
    with _mock_open(NON_DOCKER_MOUNTINFO_EXAMPLE):
        assert docker._get_container_id() is None


def test_get_container_id():
    with _mock_open(DOCKER_CGROUP_EXAMPLE):
    with _mock_open(DOCKER_CGROUPS_V1_MOUNTINFO_EXAMPLE):
        assert docker._get_container_id() == CONTAINER_ID
    with _mock_open(DOCKER_CGROUPS_V2_MOUNTINFO_EXAMPLE):
        assert docker._get_container_id() == CONTAINER_ID
    with _mock_open(PODMAN_CGROUPS_V1_MOUNTINFO_EXAMPLE):
        assert docker._get_container_id() == CONTAINER_ID
    with _mock_open(PODMAN_CGROUPS_V2_MOUNTINFO_EXAMPLE):
        assert docker._get_container_id() == CONTAINER_ID


def test_get_container_id_failure():
    with _mock_open(b''), pytest.raises(RuntimeError):
        docker._get_container_id()


def test_get_docker_path_not_in_docker_returns_same():
    with mock.patch.object(docker, '_is_in_docker', return_value=False):
        with _mock_open(b''):
            assert docker._get_docker_path('abc') == 'abc'


@pytest.fixture
def in_docker():
    with mock.patch.object(docker, '_is_in_docker', return_value=True):
        with mock.patch.object(
                docker, '_get_container_id', return_value=CONTAINER_ID,
        ):
            yield
    with mock.patch.object(
            docker, '_get_container_id', return_value=CONTAINER_ID,
    ):
        yield


def _linux_commonpath():
@@ -195,3 +360,14 @@ CMD ["echo", "This is overwritten by the entry"']

    ret = run_language(tmp_path, docker, 'echo hello hello world')
    assert ret == (0, b'hello hello world\n')


@xfailif_windows # pragma: win32 no cover
def test_docker_hook_mount_permissions(tmp_path):
    dockerfile = '''\
FROM ubuntu:22.04
'''
    tmp_path.joinpath('Dockerfile').write_text(dockerfile)

    retcode, _ = run_language(tmp_path, docker, 'touch', ('README.md',))
    assert retcode == 0

@@ -1,5 +1,8 @@
from __future__ import annotations

import os
from unittest import mock

from pre_commit.languages import julia
from testing.language_helpers import run_language
from testing.util import cwd
@@ -28,6 +31,17 @@ def test_julia_hook(tmp_path):
    assert run_language(tmp_path, julia, 'src/main.jl') == expected


def test_julia_hook_with_startup(tmp_path):
    depot_path = tmp_path.joinpath('depot')
    depot_path.joinpath('config').mkdir(parents=True)
    startup = depot_path.joinpath('config', 'startup.jl')
    startup.write_text('error("Startup file used!")\n')

    depo_path_var = f'{depot_path}{os.pathsep}'
    with mock.patch.dict(os.environ, {'JULIA_DEPOT_PATH': depo_path_var}):
        test_julia_hook(tmp_path)
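
The trailing `os.pathsep` keeps Julia's default depots on the search path while putting the poisoned depot first, so the hook run only succeeds if user startup files are never loaded. Julia's own switch for that is `--startup-file=no`; purely as an illustration of the invocation shape (not necessarily how the julia language plugin builds its command line):

cmd = ('julia', '--startup-file=no', 'src/main.jl')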


def test_julia_hook_manifest(tmp_path):
    code = """
using Example

@@ -10,8 +10,11 @@ import pre_commit.constants as C
from pre_commit.envcontext import envcontext
from pre_commit.languages import python
from pre_commit.prefix import Prefix
from pre_commit.store import _make_local_repo
from pre_commit.util import cmd_output_b
from pre_commit.util import make_executable
from pre_commit.util import win_exe
from testing.auto_namedtuple import auto_namedtuple
from testing.language_helpers import run_language


@@ -34,6 +37,72 @@ def test_read_pyvenv_cfg_non_utf8(tmpdir):
    assert python._read_pyvenv_cfg(pyvenv_cfg) == expected


def _get_default_version(
        *,
        impl: str,
        exe: str,
        found: set[str],
        version: tuple[int, int],
) -> str:
    sys_exe = f'/fake/path/{exe}'
    sys_impl = auto_namedtuple(name=impl)
    sys_ver = auto_namedtuple(major=version[0], minor=version[1])

    def find_exe(s):
        if s in found:
            return f'/fake/path/found/{exe}'
        else:
            return None

    with (
        mock.patch.object(sys, 'implementation', sys_impl),
        mock.patch.object(sys, 'executable', sys_exe),
        mock.patch.object(sys, 'version_info', sys_ver),
        mock.patch.object(python, 'find_executable', find_exe),
    ):
        return python.get_default_version.__wrapped__()


def test_default_version_sys_executable_found():
    ret = _get_default_version(
        impl='cpython',
        exe='python3.12',
        found={'python3.12'},
        version=(3, 12),
    )
    assert ret == 'python3.12'


def test_default_version_picks_specific_when_found():
    ret = _get_default_version(
        impl='cpython',
        exe='python3',
        found={'python3', 'python3.12'},
        version=(3, 12),
    )
    assert ret == 'python3.12'


def test_default_version_picks_pypy_versioned_exe():
    ret = _get_default_version(
        impl='pypy',
        exe='python',
        found={'pypy3.12', 'python3'},
        version=(3, 12),
    )
    assert ret == 'pypy3.12'


def test_default_version_picks_pypy_unversioned_exe():
    ret = _get_default_version(
        impl='pypy',
        exe='python',
        found={'pypy3', 'python3'},
        version=(3, 12),
    )
    assert ret == 'pypy3'
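
Taken together, the four cases above pin down a preference order for interpreter discovery. One way to express it (a sketch consistent with these tests, not necessarily the exact logic in pre_commit.languages.python.get_default_version):

def sketch_default_version(impl: str, exe: str, major: int, minor: int, find):
    # prefer a fully-versioned interpreter name, then the unversioned family
    # name, then whatever sys.executable is called; `find` stands in for an
    # executable lookup on PATH and returns a path or None
    prefix = 'pypy' if impl == 'pypy' else 'python'
    for candidate in (f'{prefix}{major}.{minor}', f'{prefix}{major}', exe):
        if find(candidate):
            return candidate
    return 'default'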


def test_norm_version_expanduser():
    home = os.path.expanduser('~')
    if sys.platform == 'win32': # pragma: win32 cover
@@ -284,3 +353,15 @@ def test_python_hook_weird_setup_cfg(tmp_path):

    ret = run_language(tmp_path, python, 'socks', [os.devnull])
    assert ret == (0, f'[{os.devnull!r}]\nhello hello\n'.encode())


def test_local_repo_with_other_artifacts(tmp_path):
    cmd_output_b('git', 'init', tmp_path)
    _make_local_repo(str(tmp_path))
    # pretend a rust install also ran here
    tmp_path.joinpath('target').mkdir()

    ret, out = run_language(tmp_path, python, 'python --version')

    assert ret == 0
    assert out.startswith(b'Python ')

@@ -1,9 +0,0 @@
from __future__ import annotations

from pre_commit.languages import system
from testing.language_helpers import run_language


def test_system_language(tmp_path):
    expected = (0, b'hello hello world\n')
    assert run_language(tmp_path, system, 'echo hello hello world') == expected

@@ -1,14 +1,14 @@
from __future__ import annotations

from pre_commit.languages import script
from pre_commit.languages import unsupported_script
from pre_commit.util import make_executable
from testing.language_helpers import run_language


def test_script_language(tmp_path):
def test_unsupported_script_language(tmp_path):
    exe = tmp_path.joinpath('main')
    exe.write_text('#!/usr/bin/env bash\necho hello hello world\n')
    make_executable(exe)

    expected = (0, b'hello hello world\n')
    assert run_language(tmp_path, script, 'main') == expected
    assert run_language(tmp_path, unsupported_script, 'main') == expected

10 tests/languages/unsupported_test.py Normal file
@@ -0,0 +1,10 @@
from __future__ import annotations

from pre_commit.languages import unsupported
from testing.language_helpers import run_language


def test_unsupported_language(tmp_path):
    expected = (0, b'hello hello world\n')
    ret = run_language(tmp_path, unsupported, 'echo hello hello world')
    assert ret == expected

@@ -1,6 +1,7 @@
from __future__ import annotations

import argparse
import contextlib
import os.path
from unittest import mock

@@ -8,6 +9,7 @@ import pytest

import pre_commit.constants as C
from pre_commit import main
from pre_commit.commands import hazmat
from pre_commit.errors import FatalError
from pre_commit.util import cmd_output
from testing.auto_namedtuple import auto_namedtuple
@@ -97,11 +99,9 @@ CMDS = tuple(fn.replace('_', '-') for fn in FNS)

@pytest.fixture
def mock_commands():
    mcks = {fn: mock.patch.object(main, fn).start() for fn in FNS}
    ret = auto_namedtuple(**mcks)
    yield ret
    for mck in ret:
        mck.stop()
    with contextlib.ExitStack() as ctx:
        mcks = {f: ctx.enter_context(mock.patch.object(main, f)) for f in FNS}
        yield auto_namedtuple(**mcks)
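
The rewritten fixture leans on contextlib.ExitStack so every patch entered in the dict comprehension is unwound automatically, replacing the manual .start()/.stop() bookkeeping. The same pattern in isolation (patching stdlib names purely for demonstration):

import contextlib
import os
from unittest import mock

with contextlib.ExitStack() as ctx:
    ctx.enter_context(mock.patch.object(os, 'getcwd', return_value='/fake'))
    ctx.enter_context(mock.patch.object(os, 'listdir', return_value=[]))
    assert os.getcwd() == '/fake'
    assert os.listdir('/fake') == []
# both patches are undone here, even if the block raised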


@pytest.fixture
@@ -158,6 +158,17 @@ def test_all_cmds(command, mock_commands, mock_store_dir):
    assert_only_one_mock_called(mock_commands)


def test_hazmat(mock_store_dir):
    with mock.patch.object(hazmat, 'impl') as mck:
        main.main(('hazmat', 'cd', 'subdir', '--', 'cmd', '--', 'f1', 'f2'))
    assert mck.call_count == 1
    (arg,), dct = mck.call_args
    assert dct == {}
    assert arg.tool == 'cd'
    assert arg.subdir == 'subdir'
    assert arg.cmd == ['cmd', '--', 'f1', 'f2']


def test_try_repo(mock_store_dir):
    with mock.patch.object(main, 'try_repo') as patch:
        main.main(('try-repo', '.'))

@@ -17,7 +17,7 @@ from pre_commit.clientlib import CONFIG_SCHEMA
from pre_commit.clientlib import load_manifest
from pre_commit.hook import Hook
from pre_commit.languages import python
from pre_commit.languages import system
from pre_commit.languages import unsupported
from pre_commit.prefix import Prefix
from pre_commit.repository import _hook_installed
from pre_commit.repository import all_hooks
@@ -80,13 +80,6 @@ def _test_hook_repo(
    assert out == expected


def test_system_hook_with_spaces(tempdir_factory, store):
    _test_hook_repo(
        tempdir_factory, store, 'system_hook_with_spaces_repo',
        'system-hook-with-spaces', [os.devnull], b'Hello World\n',
    )


def test_missing_executable(tempdir_factory, store):
    _test_hook_repo(
        tempdir_factory, store, 'not_found_exe',
@@ -431,7 +424,7 @@ def test_manifest_hooks(tempdir_factory, store):
        exclude_types=[],
        files='',
        id='bash_hook',
        language='script',
        language='unsupported_script',
        language_version='default',
        log_file='',
        minimum_pre_commit_version='0',
@@ -464,7 +457,7 @@ def test_non_installable_hook_error_for_language_version(store, caplog):
        'hooks': [{
            'id': 'system-hook',
            'name': 'system-hook',
            'language': 'system',
            'language': 'unsupported',
            'entry': 'python3 -c "import sys; print(sys.version)"',
            'language_version': 'python3.10',
        }],
@@ -476,7 +469,7 @@ def test_non_installable_hook_error_for_language_version(store, caplog):
    msg, = caplog.messages
    assert msg == (
        'The hook `system-hook` specifies `language_version` but is using '
        'language `system` which does not install an environment. '
        'language `unsupported` which does not install an environment. '
        'Perhaps you meant to use a specific language?'
    )

@@ -487,7 +480,7 @@ def test_non_installable_hook_error_for_additional_dependencies(store, caplog):
        'hooks': [{
            'id': 'system-hook',
            'name': 'system-hook',
            'language': 'system',
            'language': 'unsupported',
            'entry': 'python3 -c "import sys; print(sys.version)"',
            'additional_dependencies': ['astpretty'],
        }],
@@ -499,17 +492,28 @@ def test_non_installable_hook_error_for_additional_dependencies(store, caplog):
    msg, = caplog.messages
    assert msg == (
        'The hook `system-hook` specifies `additional_dependencies` but is '
        'using language `system` which does not install an environment. '
        'using language `unsupported` which does not install an environment. '
        'Perhaps you meant to use a specific language?'
    )


def test_args_with_spaces_and_quotes(tmp_path):
    ret = run_language(
        tmp_path, system,
        tmp_path, unsupported,
        f"{shlex.quote(sys.executable)} -c 'import sys; print(sys.argv[1:])'",
        ('i have spaces', 'and"\'quotes', '$and !this'),
    )

    expected = b"['i have spaces', 'and\"\\'quotes', '$and !this']\n"
    assert ret == (0, expected)


def test_hazmat(tmp_path):
    ret = run_language(
        tmp_path, unsupported,
        f'pre-commit hazmat ignore-exit-code {shlex.quote(sys.executable)} '
        f"-c 'import sys; raise SystemExit(sys.argv[1:])'",
        ('f1', 'f2'),
    )
    expected = b"['f1', 'f2']\n"
    assert ret == (0, expected)
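
The wrapped command here prints ['f1', 'f2'] and exits non-zero (a SystemExit carrying a list), yet the hook still reports success, which is the behavior `hazmat ignore-exit-code` is named for. Behaviorally that amounts to something like the following wrapper (an illustration of the semantics the test asserts, not the hazmat implementation):

import subprocess
import sys

# run the real command, let its output through, but swallow its exit status
subprocess.run(sys.argv[1:])
raise SystemExit(0)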

@@ -22,6 +22,17 @@ from testing.util import git_commit
from testing.util import xfailif_windows


def _select_all_configs(store: Store) -> list[str]:
    with store.connect() as db:
        rows = db.execute('SELECT * FROM configs').fetchall()
        return [path for path, in rows]


def _select_all_repos(store: Store) -> list[tuple[str, str, str]]:
    with store.connect() as db:
        return db.execute('SELECT repo, ref, path FROM repos').fetchall()


def test_our_session_fixture_works():
    """There's a session fixture which makes `Store` invariantly raise to
    prevent writing to the home directory.
@@ -91,7 +102,7 @@ def test_clone(store, tempdir_factory, caplog):
    assert git.head_rev(ret) == rev

    # Assert there's an entry in the sqlite db for this
    assert store.select_all_repos() == [(path, rev, ret)]
    assert _select_all_repos(store) == [(path, rev, ret)]


def test_warning_for_deprecated_stages_on_init(store, tempdir_factory, caplog):
@@ -217,7 +228,7 @@ def test_clone_shallow_failure_fallback_to_complete(
    assert git.head_rev(ret) == rev

    # Assert there's an entry in the sqlite db for this
    assert store.select_all_repos() == [(path, rev, ret)]
    assert _select_all_repos(store) == [(path, rev, ret)]


def test_clone_tag_not_on_mainline(store, tempdir_factory):
@@ -265,7 +276,7 @@ def test_mark_config_as_used(store, tmpdir):
    with tmpdir.as_cwd():
        f = tmpdir.join('f').ensure()
        store.mark_config_used('f')
        assert store.select_all_configs() == [f.strpath]
        assert _select_all_configs(store) == [f.strpath]


def test_mark_config_as_used_idempotent(store, tmpdir):
@@ -275,21 +286,12 @@ def test_mark_config_as_used_idempotent(store, tmpdir):

def test_mark_config_as_used_does_not_exist(store):
    store.mark_config_used('f')
    assert store.select_all_configs() == []


def _simulate_pre_1_14_0(store):
    with store.connect() as db:
        db.executescript('DROP TABLE configs')


def test_select_all_configs_roll_forward(store):
    _simulate_pre_1_14_0(store)
    assert store.select_all_configs() == []
    assert _select_all_configs(store) == []


def test_mark_config_as_used_roll_forward(store, tmpdir):
    _simulate_pre_1_14_0(store)
    with store.connect() as db: # simulate pre-1.14.0
        db.executescript('DROP TABLE configs')
    test_mark_config_as_used(store, tmpdir)


@@ -314,7 +316,7 @@ def test_mark_config_as_used_readonly(tmpdir):
    assert store.readonly
    # should be skipped due to readonly
    store.mark_config_used(str(cfg))
    assert store.select_all_configs() == []
    assert _select_all_configs(store) == []


def test_clone_with_recursive_submodules(store, tmp_path):