Merge pull request #1281 from pre-commit/py2_cleanup_more
Some manual python 2 cleanup
This commit is contained in: commit 489d9f9926
40 changed files with 228 additions and 389 deletions
@@ -1,7 +1,7 @@
 import argparse
 import functools
 import logging
-import pipes
+import shlex
 import sys
 from typing import Any
 from typing import Dict
@@ -25,18 +25,17 @@ check_string_regex = cfgv.check_and(cfgv.check_string, cfgv.check_regex)
 def check_type_tag(tag: str) -> None:
     if tag not in ALL_TAGS:
         raise cfgv.ValidationError(
-            'Type tag {!r} is not recognized.  '
-            'Try upgrading identify and pre-commit?'.format(tag),
+            f'Type tag {tag!r} is not recognized.  '
+            f'Try upgrading identify and pre-commit?',
         )


 def check_min_version(version: str) -> None:
     if parse_version(version) > parse_version(C.VERSION):
         raise cfgv.ValidationError(
-            'pre-commit version {} is required but version {} is installed.  '
-            'Perhaps run `pip install --upgrade pre-commit`.'.format(
-                version, C.VERSION,
-            ),
+            f'pre-commit version {version} is required but version '
+            f'{C.VERSION} is installed.  '
+            f'Perhaps run `pip install --upgrade pre-commit`.',
         )
@@ -142,9 +141,7 @@ def _entry(modname: str) -> str:
     runner, so to prevent issues with spaces and backslashes (on Windows)
     it must be quoted here.
     """
-    return '{} -m pre_commit.meta_hooks.{}'.format(
-        pipes.quote(sys.executable), modname,
-    )
+    return f'{shlex.quote(sys.executable)} -m pre_commit.meta_hooks.{modname}'


 def warn_unknown_keys_root(
@@ -152,9 +149,7 @@ def warn_unknown_keys_root(
     orig_keys: Sequence[str],
     dct: Dict[str, str],
 ) -> None:
-    logger.warning(
-        'Unexpected key(s) present at root: {}'.format(', '.join(extra)),
-    )
+    logger.warning(f'Unexpected key(s) present at root: {", ".join(extra)}')


 def warn_unknown_keys_repo(
@@ -163,9 +158,7 @@ def warn_unknown_keys_repo(
     dct: Dict[str, str],
 ) -> None:
     logger.warning(
-        'Unexpected key(s) present on {}: {}'.format(
-            dct['repo'], ', '.join(extra),
-        ),
+        f'Unexpected key(s) present on {dct["repo"]}: {", ".join(extra)}',
     )
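A note on the pattern above: most of this cleanup swaps `str.format()` calls for f-strings, which were unavailable on Python 2 but exist on every interpreter pre-commit now targets. A minimal sketch of the equivalence, using a throwaway value rather than anything from this diff:

import sys

tag = 'python3'
# Old style: positional formatting with an explicit conversion flag.
msg_old = 'Type tag {!r} is not recognized.'.format(tag)
# New style: the expression and the !r conversion live inside the literal.
msg_new = f'Type tag {tag!r} is not recognized.'
assert msg_old == msg_new
print(f'running under python {sys.version_info[0]}.{sys.version_info[1]}')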
@@ -80,13 +80,12 @@ def _check_hooks_still_exist_at_rev(
     hooks_missing = hooks - {hook['id'] for hook in manifest}
     if hooks_missing:
         raise RepositoryCannotBeUpdatedError(
-            'Cannot update because the tip of master is missing these hooks:\n'
-            '{}'.format(', '.join(sorted(hooks_missing))),
+            f'Cannot update because the tip of HEAD is missing these hooks:\n'
+            f'{", ".join(sorted(hooks_missing))}',
         )


 REV_LINE_RE = re.compile(r'^(\s+)rev:(\s*)([^\s#]+)(.*)(\r?\n)$', re.DOTALL)
-REV_LINE_FMT = '{}rev:{}{}{}{}'


 def _original_lines(
@@ -122,13 +121,11 @@ def _write_new_config(path: str, rev_infos: List[Optional[RevInfo]]) -> None:
         new_rev = new_rev_s.split(':', 1)[1].strip()
         if rev_info.frozen is not None:
             comment = f' # frozen: {rev_info.frozen}'
-        elif match.group(4).strip().startswith('# frozen:'):
+        elif match[4].strip().startswith('# frozen:'):
             comment = ''
         else:
-            comment = match.group(4)
-        lines[idx] = REV_LINE_FMT.format(
-            match.group(1), match.group(2), new_rev, comment, match.group(5),
-        )
+            comment = match[4]
+        lines[idx] = f'{match[1]}rev:{match[2]}{new_rev}{comment}{match[5]}'

     with open(path, 'w') as f:
         f.write(''.join(lines))
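The `match.group(4)` to `match[4]` change relies on `re.Match` supporting indexing, which Python 3.6 added: `m[0]` is the whole match and `m[n]` is group n. A small illustrative sketch, not taken from the diff:

import re

m = re.match(r'(\s*)rev:(\s*)(\S+)', '    rev: v1.2.3')
assert m is not None
# m.group(3) and m[3] are equivalent spellings on Python 3.6+.
assert m.group(3) == m[3] == 'v1.2.3'
assert m[0] == '    rev: v1.2.3'  # group 0 is the entire match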
@@ -14,7 +14,6 @@ from pre_commit.repository import all_hooks
 from pre_commit.repository import install_hook_envs
 from pre_commit.store import Store
 from pre_commit.util import make_executable
-from pre_commit.util import mkdirp
 from pre_commit.util import resource_text
@@ -78,7 +77,7 @@ def _install_hook_script(
 ) -> None:
     hook_path, legacy_path = _hook_paths(hook_type, git_dir=git_dir)

-    mkdirp(os.path.dirname(hook_path))
+    os.makedirs(os.path.dirname(hook_path), exist_ok=True)

     # If we have an existing hook, move it to pre-commit.legacy
     if os.path.lexists(hook_path) and not is_our_script(hook_path):
@@ -89,8 +88,8 @@ def _install_hook_script(
         os.remove(legacy_path)
     elif os.path.exists(legacy_path):
         output.write_line(
-            'Running in migration mode with existing hooks at {}\n'
-            'Use -f to use only pre-commit.'.format(legacy_path),
+            f'Running in migration mode with existing hooks at {legacy_path}\n'
+            f'Use -f to use only pre-commit.',
         )

     params = {
@@ -110,7 +109,7 @@ def _install_hook_script(
         hook_file.write(before + TEMPLATE_START)
         for line in to_template.splitlines():
             var = line.split()[0]
-            hook_file.write('{} = {!r}\n'.format(var, params[var]))
+            hook_file.write(f'{var} = {params[var]!r}\n')
         hook_file.write(TEMPLATE_END + after)
     make_executable(hook_path)
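Dropping `mkdirp` works because `os.makedirs` grew an `exist_ok` flag in Python 3.2, which makes the try/except wrapper unnecessary. A hedged sketch of the two spellings; the helper body mirrors the deleted function and the path is invented for illustration:

import os
import tempfile


def mkdirp(path: str) -> None:
    # Python 2 era helper: swallow "already exists", re-raise real errors.
    try:
        os.makedirs(path)
    except OSError:
        if not os.path.exists(path):
            raise


# Python 3 replacement used throughout this commit:
target = os.path.join(tempfile.gettempdir(), 'pre-commit-demo', 'nested')
os.makedirs(target, exist_ok=True)
os.makedirs(target, exist_ok=True)  # second call is a no-op, no exception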
@@ -1,4 +1,5 @@
 import argparse
+import contextlib
 import functools
 import logging
 import os
@@ -27,7 +28,6 @@ from pre_commit.staged_files_only import staged_files_only
 from pre_commit.store import Store
 from pre_commit.util import cmd_output_b
 from pre_commit.util import EnvironT
-from pre_commit.util import noop_context


 logger = logging.getLogger('pre_commit')
@@ -173,7 +173,7 @@ def _run_single_hook(

     if out.strip():
         output.write_line()
-        output.write_line(out.strip(), logfile_name=hook.log_file)
+        output.write_line_b(out.strip(), logfile_name=hook.log_file)
         output.write_line()

     return files_modified or bool(retcode)
@@ -243,9 +243,10 @@ def _run_hooks(
         output.write_line('All changes made by hooks:')
         # args.color is a boolean.
        # See user_color function in color.py
+        git_color_opt = 'always' if args.color else 'never'
         subprocess.call((
             'git', '--no-pager', 'diff', '--no-ext-diff',
-            '--color={}'.format({True: 'always', False: 'never'}[args.color]),
+            f'--color={git_color_opt}',
         ))

     return retval
@@ -271,7 +272,7 @@ def run(
     args: argparse.Namespace,
     environ: EnvironT = os.environ,
 ) -> int:
-    no_stash = args.all_files or bool(args.files)
+    stash = not args.all_files and not args.files

     # Check if we have unresolved merge conflict files and fail fast.
     if _has_unmerged_paths():
@@ -280,10 +281,10 @@ def run(
     if bool(args.source) != bool(args.origin):
         logger.error('Specify both --origin and --source.')
         return 1
-    if _has_unstaged_config(config_file) and not no_stash:
+    if stash and _has_unstaged_config(config_file):
         logger.error(
-            'Your pre-commit configuration is unstaged.\n'
-            '`git add {}` to fix this.'.format(config_file),
+            f'Your pre-commit configuration is unstaged.\n'
+            f'`git add {config_file}` to fix this.',
         )
         return 1
@@ -292,12 +293,10 @@ def run(
         environ['PRE_COMMIT_ORIGIN'] = args.origin
         environ['PRE_COMMIT_SOURCE'] = args.source

-    if no_stash:
-        ctx = noop_context()
-    else:
-        ctx = staged_files_only(store.directory)
+    with contextlib.ExitStack() as exit_stack:
+        if stash:
+            exit_stack.enter_context(staged_files_only(store.directory))

-    with ctx:
         config = load_config(config_file)
         hooks = [
             hook
@@ -308,12 +307,13 @@ def run(

         if args.hook and not hooks:
             output.write_line(
-                'No hook with id `{}` in stage `{}`'.format(
-                    args.hook, args.hook_stage,
-                ),
+                f'No hook with id `{args.hook}` in stage `{args.hook_stage}`',
             )
             return 1

         install_hook_envs(hooks, store)

         return _run_hooks(config, hooks, args, environ)
+
+    # https://github.com/python/mypy/issues/7726
+    raise AssertionError('unreachable')
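The run() change replaces the `noop_context()` helper with `contextlib.ExitStack`, which behaves as a no-op when nothing is entered and otherwise manages whatever was pushed into it. A minimal standalone sketch of that pattern; the names and printed messages are invented for illustration:

import contextlib


@contextlib.contextmanager
def staged_files_only(directory):
    print(f'stashing unstaged files into {directory}')
    yield
    print('restoring unstaged files')


def run(stash: bool) -> None:
    with contextlib.ExitStack() as exit_stack:
        if stash:
            # Entered only when needed; ExitStack unwinds it on exit.
            exit_stack.enter_context(staged_files_only('/tmp/patches'))
        print('running hooks')


run(stash=False)  # no stash context entered
run(stash=True)   # stash context wraps the hook run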
@@ -1,5 +1,4 @@
 import argparse
-import collections
 import logging
 import os.path
 from typing import Tuple
@@ -62,8 +61,7 @@ def try_repo(args: argparse.Namespace) -> int:
         manifest = sorted(manifest, key=lambda hook: hook['id'])
         hooks = [{'id': hook['id']} for hook in manifest]

-    items = (('repo', repo), ('rev', ref), ('hooks', hooks))
-    config = {'repos': [collections.OrderedDict(items)]}
+    config = {'repos': [{'repo': repo, 'rev': ref, 'hooks': hooks}]}
     config_s = ordered_dump(config, **C.YAML_DUMP_KWARGS)

     config_filename = os.path.join(tempdir, C.CONFIG_FILE)
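Dropping `collections.OrderedDict` here is safe because plain dicts preserve insertion order on the interpreters pre-commit supports (an implementation detail in 3.6, guaranteed by the language from 3.7). A short sketch of the equivalence; the key names mirror the config but the values are made up:

import collections

items = (('repo', 'https://example.com/repo'), ('rev', 'v1.0.0'), ('hooks', []))
ordered = collections.OrderedDict(items)
plain = {'repo': 'https://example.com/repo', 'rev': 'v1.0.0', 'hooks': []}

# Same keys in the same order when iterated or serialized.
assert list(ordered) == list(plain) == ['repo', 'rev', 'hooks']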
@@ -3,10 +3,9 @@ import os.path
 import sys
 import traceback
 from typing import Generator
-from typing import Union
+from typing import Optional

 import pre_commit.constants as C
-from pre_commit import five
 from pre_commit import output
 from pre_commit.store import Store
@@ -15,25 +14,24 @@ class FatalError(RuntimeError):
     pass


-def _to_bytes(exc: BaseException) -> bytes:
-    return str(exc).encode('UTF-8')
-
-
 def _log_and_exit(msg: str, exc: BaseException, formatted: str) -> None:
     error_msg = b''.join((
-        five.to_bytes(msg), b': ',
-        five.to_bytes(type(exc).__name__), b': ',
-        _to_bytes(exc),
+        msg.encode(), b': ',
+        type(exc).__name__.encode(), b': ',
+        str(exc).encode(),
     ))
-    output.write_line(error_msg)
+    output.write_line_b(error_msg)
     store = Store()
     log_path = os.path.join(store.directory, 'pre-commit.log')
     output.write_line(f'Check the log at {log_path}')

     with open(log_path, 'wb') as log:
-        def _log_line(s: Union[None, str, bytes] = None) -> None:
+        def _log_line(s: Optional[str] = None) -> None:
             output.write_line(s, stream=log)

+        def _log_line_b(s: Optional[bytes] = None) -> None:
+            output.write_line_b(s, stream=log)
+
         _log_line('### version information')
         _log_line()
         _log_line('```')
@@ -50,7 +48,7 @@ def _log_and_exit(msg: str, exc: BaseException, formatted: str) -> None:
         _log_line('### error information')
         _log_line()
         _log_line('```')
-        _log_line(error_msg)
+        _log_line_b(error_msg)
         _log_line('```')
         _log_line()
         _log_line('```')
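Most of the byte/text churn above comes down to one fact: on Python 3, `str.encode()` and `bytes.decode()` default to UTF-8, so the explicit `'UTF-8'` arguments and the `five.to_bytes`/`five.to_text` shims add nothing. A tiny sketch:

exc = ValueError('boom')

# Explicit codec (old style) and the default (new style) are identical.
assert str(exc).encode('UTF-8') == str(exc).encode() == b'boom'
assert b'boom'.decode('UTF-8') == b'boom'.decode() == 'boom'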
@@ -1,12 +0,0 @@
-from typing import Union
-
-
-def to_text(s: Union[str, bytes]) -> str:
-    return s if isinstance(s, str) else s.decode('UTF-8')
-
-
-def to_bytes(s: Union[str, bytes]) -> bytes:
-    return s if isinstance(s, bytes) else s.encode('UTF-8')
-
-
-n = to_text
@@ -69,7 +69,7 @@ def is_in_merge_conflict() -> bool:
 def parse_merge_msg_for_conflicts(merge_msg: bytes) -> List[str]:
     # Conflicted files start with tabs
     return [
-        line.lstrip(b'#').strip().decode('UTF-8')
+        line.lstrip(b'#').strip().decode()
         for line in merge_msg.splitlines()
         # '#\t' for git 2.4.1
         if line.startswith((b'\t', b'#\t'))
@@ -183,13 +183,11 @@ def check_for_cygwin_mismatch() -> None:
     if is_cygwin_python ^ is_cygwin_git:
         exe_type = {True: '(cygwin)', False: '(windows)'}
         logger.warn(
-            'pre-commit has detected a mix of cygwin python / git\n'
-            'This combination is not supported, it is likely you will '
-            'receive an error later in the program.\n'
-            'Make sure to use cygwin git+python while using cygwin\n'
-            'These can be installed through the cygwin installer.\n'
-            ' - python {}\n'
-            ' - git {}\n'.format(
-                exe_type[is_cygwin_python], exe_type[is_cygwin_git],
-            ),
+            f'pre-commit has detected a mix of cygwin python / git\n'
+            f'This combination is not supported, it is likely you will '
+            f'receive an error later in the program.\n'
+            f'Make sure to use cygwin git+python while using cygwin\n'
+            f'These can be installed through the cygwin installer.\n'
+            f' - python {exe_type[is_cygwin_python]}\n'
+            f' - git {exe_type[is_cygwin_git]}\n',
         )
@@ -81,7 +81,7 @@ def install_environment(

 def get_docker_user() -> str:  # pragma: windows no cover
     try:
-        return '{}:{}'.format(os.getuid(), os.getgid())
+        return f'{os.getuid()}:{os.getgid()}'
     except AttributeError:
         return '1000:1000'
@@ -94,7 +94,7 @@ def docker_cmd() -> Tuple[str, ...]:  # pragma: windows no cover
         # https://docs.docker.com/engine/reference/commandline/run/#mount-volumes-from-container-volumes-from
         # The `Z` option tells Docker to label the content with a private
         # unshared label. Only the current container can use a private volume.
-        '-v', '{}:/src:rw,Z'.format(os.getcwd()),
+        '-v', f'{os.getcwd()}:/src:rw,Z',
         '--workdir', '/src',
     )
@@ -18,6 +18,6 @@ def run_hook(
     file_args: Sequence[str],
     color: bool,
 ) -> Tuple[int, bytes]:
-    out = hook.entry.encode('UTF-8') + b'\n\n'
-    out += b'\n'.join(f.encode('UTF-8') for f in file_args) + b'\n'
+    out = hook.entry.encode() + b'\n\n'
+    out += b'\n'.join(f.encode() for f in file_args) + b'\n'
     return 1, out
@@ -51,8 +51,8 @@ def assert_no_additional_deps(
 ) -> None:
     if additional_deps:
         raise AssertionError(
-            'For now, pre-commit does not support '
-            'additional_dependencies for {}'.format(lang),
+            f'For now, pre-commit does not support '
+            f'additional_dependencies for {lang}',
         )
@@ -30,10 +30,10 @@ def _envdir(prefix: Prefix, version: str) -> str:
     return prefix.path(directory)


-def get_env_patch(venv: str) -> PatchesT:  # pragma: windows no cover
+def get_env_patch(venv: str) -> PatchesT:
     if sys.platform == 'cygwin':  # pragma: no cover
         _, win_venv, _ = cmd_output('cygpath', '-w', venv)
-        install_prefix = r'{}\bin'.format(win_venv.strip())
+        install_prefix = fr'{win_venv.strip()}\bin'
         lib_dir = 'lib'
     elif sys.platform == 'win32':  # pragma: no cover
         install_prefix = bin_dir(venv)
@@ -54,14 +54,14 @@ def get_env_patch(venv: str) -> PatchesT:  # pragma: windows no cover
 def in_env(
     prefix: Prefix,
     language_version: str,
-) -> Generator[None, None, None]:  # pragma: windows no cover
+) -> Generator[None, None, None]:
     with envcontext(get_env_patch(_envdir(prefix, language_version))):
         yield


 def install_environment(
     prefix: Prefix, version: str, additional_dependencies: Sequence[str],
-) -> None:  # pragma: windows no cover
+) -> None:
     additional_dependencies = tuple(additional_dependencies)
     assert prefix.exists('package.json')
     envdir = _envdir(prefix, version)
@@ -91,6 +91,6 @@ def run_hook(
     hook: 'Hook',
     file_args: Sequence[str],
     color: bool,
-) -> Tuple[int, bytes]:  # pragma: windows no cover
+) -> Tuple[int, bytes]:
     with in_env(hook.prefix, hook.language_version):
         return helpers.run_xargs(hook, hook.cmd, file_args, color=color)
@@ -27,7 +27,7 @@ def _process_filename_by_line(pattern: Pattern[bytes], filename: str) -> int:
             if pattern.search(line):
                 retv = 1
                 output.write(f'{filename}:{line_no}:')
-                output.write_line(line.rstrip(b'\r\n'))
+                output.write_line_b(line.rstrip(b'\r\n'))
     return retv
@@ -39,12 +39,12 @@ def _process_filename_at_once(pattern: Pattern[bytes], filename: str) -> int:
     if match:
         retv = 1
         line_no = contents[:match.start()].count(b'\n')
-        output.write('{}:{}:'.format(filename, line_no + 1))
+        output.write(f'{filename}:{line_no + 1}:')

-        matched_lines = match.group().split(b'\n')
+        matched_lines = match[0].split(b'\n')
         matched_lines[0] = contents.split(b'\n')[line_no]

-        output.write_line(b'\n'.join(matched_lines))
+        output.write_line_b(b'\n'.join(matched_lines))
     return retv
@@ -47,10 +47,10 @@ def _find_by_py_launcher(
     version: str,
 ) -> Optional[str]:  # pragma: no cover (windows only)
     if version.startswith('python'):
+        num = version[len('python'):]
         try:
             return cmd_output(
-                'py', '-{}'.format(version[len('python'):]),
-                '-c', 'import sys; print(sys.executable)',
+                'py', f'-{num}', '-c', 'import sys; print(sys.executable)',
             )[1].strip()
         except CalledProcessError:
             pass
@@ -88,7 +88,7 @@ def get_default_version() -> str:  # pragma: no cover (platform dependent)
         return exe

     # Next try the `pythonX.X` executable
-    exe = 'python{}.{}'.format(*sys.version_info)
+    exe = f'python{sys.version_info[0]}.{sys.version_info[1]}'
     if find_executable(exe):
         return exe
@@ -96,7 +96,8 @@ def get_default_version() -> str:  # pragma: no cover (platform dependent)
         return exe

     # Give a best-effort try for windows
-    if os.path.exists(r'C:\{}\python.exe'.format(exe.replace('.', ''))):
+    default_folder_name = exe.replace('.', '')
+    if os.path.exists(fr'C:\{default_folder_name}\python.exe'):
         return exe

     # We tried!
@@ -135,7 +136,8 @@ def norm_version(version: str) -> str:
     # If it is in the form pythonx.x search in the default
     # place on windows
     if version.startswith('python'):
-        return r'C:\{}\python.exe'.format(version.replace('.', ''))
+        default_folder_name = version.replace('.', '')
+        return fr'C:\{default_folder_name}\python.exe'

     # Otherwise assume it is a path
     return os.path.expanduser(version)
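The `fr'...'` literals introduced here combine a raw string (so the backslashes in the Windows path stay literal without doubling or escape warnings) with f-string interpolation; the prefix letters may appear in either order. A standalone sketch with an invented folder name:

default_folder_name = 'python37'

old = r'C:\{}\python.exe'.format(default_folder_name)
new = fr'C:\{default_folder_name}\python.exe'

assert old == new == r'C:\python37\python.exe'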
@@ -79,29 +79,6 @@ def _install_rbenv(
     _extract_resource('ruby-download.tar.gz', plugins_dir)
     _extract_resource('ruby-build.tar.gz', plugins_dir)

-    activate_path = prefix.path(directory, 'bin', 'activate')
-    with open(activate_path, 'w') as activate_file:
-        # This is similar to how you would install rbenv to your home directory
-        # However we do a couple things to make the executables exposed and
-        # configure it to work in our directory.
-        # We also modify the PS1 variable for manual debugging sake.
-        activate_file.write(
-            '#!/usr/bin/env bash\n'
-            "export RBENV_ROOT='{directory}'\n"
-            'export PATH="$RBENV_ROOT/bin:$PATH"\n'
-            'eval "$(rbenv init -)"\n'
-            'export PS1="(rbenv)$PS1"\n'
-            # This lets us install gems in an isolated and repeatable
-            # directory
-            "export GEM_HOME='{directory}/gems'\n"
-            'export PATH="$GEM_HOME/bin:$PATH"\n'
-            '\n'.format(directory=prefix.path(directory)),
-        )
-
-        # If we aren't using the system ruby, add a version here
-        if version != C.DEFAULT:
-            activate_file.write(f'export RBENV_VERSION="{version}"\n')


 def _install_ruby(
     prefix: Prefix,
@@ -21,16 +21,12 @@ class LoggingHandler(logging.Handler):
         self.use_color = use_color

     def emit(self, record: logging.LogRecord) -> None:
-        output.write_line(
-            '{} {}'.format(
-                color.format_color(
-                    f'[{record.levelname}]',
-                    LOG_LEVEL_COLORS[record.levelname],
-                    self.use_color,
-                ),
-                record.getMessage(),
-            ),
+        level_msg = color.format_color(
+            f'[{record.levelname}]',
+            LOG_LEVEL_COLORS[record.levelname],
+            self.use_color,
         )
+        output.write_line(f'{level_msg} {record.getMessage()}')


 @contextlib.contextmanager
@@ -9,7 +9,6 @@ from typing import Union

 import pre_commit.constants as C
 from pre_commit import color
-from pre_commit import five
 from pre_commit import git
 from pre_commit.commands.autoupdate import autoupdate
 from pre_commit.commands.clean import clean
@@ -155,7 +154,6 @@ def _adjust_args_and_chdir(args: argparse.Namespace) -> None:

 def main(argv: Optional[Sequence[str]] = None) -> int:
     argv = argv if argv is not None else sys.argv[1:]
-    argv = [five.to_text(arg) for arg in argv]
     parser = argparse.ArgumentParser(prog='pre-commit')

     # https://stackoverflow.com/a/8521644/812183
@@ -34,8 +34,7 @@ def check_useless_excludes(config_file: str) -> int:
         exclude = config['exclude']
         if not exclude_matches_any(classifier.filenames, '', exclude):
             print(
-                'The global exclude pattern {!r} does not match any files'
-                .format(exclude),
+                f'The global exclude pattern {exclude!r} does not match any files',
             )
             retv = 1
@@ -50,8 +49,8 @@ def check_useless_excludes(config_file: str) -> int:
             include, exclude = hook['files'], hook['exclude']
             if not exclude_matches_any(names, include, exclude):
                 print(
-                    'The exclude pattern {!r} for {} does not match any files'
-                    .format(exclude, hook['id']),
+                    f'The exclude pattern {exclude!r} for {hook["id"]} does '
+                    f'not match any files',
                 )
                 retv = 1
@@ -1,11 +1,10 @@
 import contextlib
 import sys
+from typing import Any
 from typing import IO
 from typing import Optional
-from typing import Union

 from pre_commit import color
-from pre_commit import five


 def get_hook_message(
@@ -54,21 +53,18 @@ def get_hook_message(
     assert end_msg is not None
     assert end_color is not None
     assert use_color is not None
-    return '{}{}{}{}\n'.format(
-        start,
-        '.' * (cols - len(start) - len(postfix) - len(end_msg) - 1),
-        postfix,
-        color.format_color(end_msg, end_color, use_color),
-    )
+    dots = '.' * (cols - len(start) - len(postfix) - len(end_msg) - 1)
+    end = color.format_color(end_msg, end_color, use_color)
+    return f'{start}{dots}{postfix}{end}\n'


 def write(s: str, stream: IO[bytes] = sys.stdout.buffer) -> None:
-    stream.write(five.to_bytes(s))
+    stream.write(s.encode())
     stream.flush()


-def write_line(
-    s: Union[None, str, bytes] = None,
+def write_line_b(
+    s: Optional[bytes] = None,
     stream: IO[bytes] = sys.stdout.buffer,
     logfile_name: Optional[str] = None,
 ) -> None:
@@ -80,6 +76,10 @@ def write_line(

     for output_stream in output_streams:
         if s is not None:
-            output_stream.write(five.to_bytes(s))
+            output_stream.write(s)
         output_stream.write(b'\n')
         output_stream.flush()
+
+
+def write_line(s: Optional[str] = None, **kwargs: Any) -> None:
+    write_line_b(s.encode() if s is not None else s, **kwargs)
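The output module change replaces a single `write_line` that accepted `Union[None, str, bytes]` with two narrower functions: `write_line_b` writes optional bytes and `write_line` encodes optional text and delegates. A self-contained sketch of that shape, with simplified signatures rather than the real module's logfile handling:

import sys
from typing import Any, IO, Optional


def write_line_b(s: Optional[bytes] = None, stream: IO[bytes] = sys.stdout.buffer) -> None:
    if s is not None:
        stream.write(s)
    stream.write(b'\n')
    stream.flush()


def write_line(s: Optional[str] = None, **kwargs: Any) -> None:
    # Encode text at the edge; None passes through untouched.
    write_line_b(s.encode() if s is not None else s, **kwargs)


write_line('hello')             # text path
write_line_b(b'\xe2\x98\x83')   # raw bytes path (a UTF-8 snowman)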
@@ -9,7 +9,7 @@ from identify.identify import parse_shebang_from_file

 class ExecutableNotFoundError(OSError):
     def to_output(self) -> Tuple[int, bytes, None]:
-        return (1, self.args[0].encode('UTF-8'), None)
+        return (1, self.args[0].encode(), None)


 def parse_filename(filename: str) -> Tuple[str, ...]:
@@ -12,7 +12,6 @@ from typing import Set
 from typing import Tuple

 import pre_commit.constants as C
-from pre_commit import five
 from pre_commit.clientlib import load_manifest
 from pre_commit.clientlib import LOCAL
 from pre_commit.clientlib import MANIFEST_HOOK_DICT
@@ -49,7 +48,7 @@ def _write_state(prefix: Prefix, venv: str, state: object) -> None:
     state_filename = _state_filename(prefix, venv)
     staging = state_filename + 'staging'
     with open(staging, 'w') as state_file:
-        state_file.write(five.to_text(json.dumps(state)))
+        state_file.write(json.dumps(state))
     # Move the file into place atomically to indicate we've installed
     os.rename(staging, state_filename)
@@ -137,8 +136,8 @@ class Hook(NamedTuple):
         extra_keys = set(dct) - set(_KEYS)
         if extra_keys:
             logger.warning(
-                'Unexpected key(s) present on {} => {}: '
-                '{}'.format(src, dct['id'], ', '.join(sorted(extra_keys))),
+                f'Unexpected key(s) present on {src} => {dct["id"]}: '
+                f'{", ".join(sorted(extra_keys))}',
             )
         return cls(src=src, prefix=prefix, **{k: dct[k] for k in _KEYS})
@@ -154,11 +153,9 @@ def _hook(
         version = ret['minimum_pre_commit_version']
         if parse_version(version) > parse_version(C.VERSION):
             logger.error(
-                'The hook `{}` requires pre-commit version {} but version {} '
-                'is installed.  '
-                'Perhaps run `pip install --upgrade pre-commit`.'.format(
-                    ret['id'], version, C.VERSION,
-                ),
+                f'The hook `{ret["id"]}` requires pre-commit version {version} '
+                f'but version {C.VERSION} is installed.  '
+                f'Perhaps run `pip install --upgrade pre-commit`.',
             )
             exit(1)
@@ -210,10 +207,9 @@ def _cloned_repository_hooks(
     for hook in repo_config['hooks']:
         if hook['id'] not in by_id:
             logger.error(
-                '`{}` is not present in repository {}.  '
-                'Typo? Perhaps it is introduced in a newer version?  '
-                'Often `pre-commit autoupdate` fixes this.'
-                .format(hook['id'], repo),
+                f'`{hook["id"]}` is not present in repository {repo}.  '
+                f'Typo? Perhaps it is introduced in a newer version?  '
+                f'Often `pre-commit autoupdate` fixes this.',
            )
            exit(1)
@@ -39,7 +39,7 @@ def _norm_exe(exe: str) -> Tuple[str, ...]:
         if f.read(2) != b'#!':
             return ()
         try:
-            first_line = f.readline().decode('UTF-8')
+            first_line = f.readline().decode()
         except UnicodeDecodeError:
             return ()
@@ -52,12 +52,11 @@ def _norm_exe(exe: str) -> Tuple[str, ...]:
 def _run_legacy() -> Tuple[int, bytes]:
     if __file__.endswith('.legacy'):
         raise SystemExit(
-            "bug: pre-commit's script is installed in migration mode\n"
-            'run `pre-commit install -f --hook-type {}` to fix this\n\n'
-            'Please report this bug at '
-            'https://github.com/pre-commit/pre-commit/issues'.format(
-                HOOK_TYPE,
-            ),
+            f"bug: pre-commit's script is installed in migration mode\n"
+            f'run `pre-commit install -f --hook-type {HOOK_TYPE}` to fix '
+            f'this\n\n'
+            f'Please report this bug at '
+            f'https://github.com/pre-commit/pre-commit/issues',
         )

     if HOOK_TYPE == 'pre-push':
@@ -77,25 +76,22 @@ def _run_legacy() -> Tuple[int, bytes]:

 def _validate_config() -> None:
     cmd = ('git', 'rev-parse', '--show-toplevel')
-    top_level = subprocess.check_output(cmd).decode('UTF-8').strip()
+    top_level = subprocess.check_output(cmd).decode().strip()
     cfg = os.path.join(top_level, CONFIG)
     if os.path.isfile(cfg):
         pass
     elif SKIP_ON_MISSING_CONFIG or os.getenv('PRE_COMMIT_ALLOW_NO_CONFIG'):
-        print(
-            '`{}` config file not found. '
-            'Skipping `pre-commit`.'.format(CONFIG),
-        )
+        print(f'`{CONFIG}` config file not found. Skipping `pre-commit`.')
         raise EarlyExit()
     else:
         raise FatalError(
-            'No {} file was found\n'
-            '- To temporarily silence this, run '
-            '`PRE_COMMIT_ALLOW_NO_CONFIG=1 git ...`\n'
-            '- To permanently silence this, install pre-commit with the '
-            '--allow-missing-config option\n'
-            '- To uninstall pre-commit run '
-            '`pre-commit uninstall`'.format(CONFIG),
+            f'No {CONFIG} file was found\n'
+            f'- To temporarily silence this, run '
+            f'`PRE_COMMIT_ALLOW_NO_CONFIG=1 git ...`\n'
+            f'- To permanently silence this, install pre-commit with the '
+            f'--allow-missing-config option\n'
+            f'- To uninstall pre-commit run '
+            f'`pre-commit uninstall`',
        )
@@ -127,7 +123,7 @@ def _pre_push(stdin: bytes) -> Tuple[str, ...]:
     remote = sys.argv[1]

     opts: Tuple[str, ...] = ()
-    for line in stdin.decode('UTF-8').splitlines():
+    for line in stdin.decode().splitlines():
         _, local_sha, _, remote_sha = line.split()
         if local_sha == Z40:
             continue
@@ -8,7 +8,6 @@ from pre_commit import git
 from pre_commit.util import CalledProcessError
 from pre_commit.util import cmd_output
 from pre_commit.util import cmd_output_b
-from pre_commit.util import mkdirp
 from pre_commit.xargs import xargs
@@ -48,14 +47,14 @@ def _unstaged_changes_cleared(patch_dir: str) -> Generator[None, None, None]:
         retcode=None,
     )
     if retcode and diff_stdout_binary.strip():
-        patch_filename = 'patch{}'.format(int(time.time()))
+        patch_filename = f'patch{int(time.time())}'
         patch_filename = os.path.join(patch_dir, patch_filename)
         logger.warning('Unstaged files detected.')
         logger.info(
             f'Stashing unstaged files to {patch_filename}.',
         )
         # Save the current unstaged changes as a patch
-        mkdirp(patch_dir)
+        os.makedirs(patch_dir, exist_ok=True)
         with open(patch_filename, 'wb') as patch_file:
             patch_file.write(diff_stdout_binary)
@@ -16,7 +16,6 @@ from pre_commit import git
 from pre_commit.util import CalledProcessError
 from pre_commit.util import clean_path_on_failure
 from pre_commit.util import cmd_output_b
-from pre_commit.util import mkdirp
 from pre_commit.util import resource_text
 from pre_commit.util import rmtree
@@ -45,7 +44,7 @@ class Store:
         self.db_path = os.path.join(self.directory, 'db.db')

         if not os.path.exists(self.directory):
-            mkdirp(self.directory)
+            os.makedirs(self.directory, exist_ok=True)
             with open(os.path.join(self.directory, 'README'), 'w') as f:
                 f.write(
                     'This directory is maintained by the pre-commit project.\n'
@@ -102,7 +101,7 @@ class Store:
     @classmethod
     def db_repo_name(cls, repo: str, deps: Sequence[str]) -> str:
         if deps:
-            return '{}:{}'.format(repo, ','.join(sorted(deps)))
+            return f'{repo}:{",".join(sorted(deps))}'
         else:
             return repo
@@ -17,7 +17,6 @@ from typing import Tuple
 from typing import Type
 from typing import Union

-from pre_commit import five
 from pre_commit import parse_shebang

 if sys.version_info >= (3, 7):  # pragma: no cover (PY37+)
@@ -30,14 +29,6 @@ else:  # pragma: no cover (<PY37)
 EnvironT = Union[Dict[str, str], 'os._Environ']


-def mkdirp(path: str) -> None:
-    try:
-        os.makedirs(path)
-    except OSError:
-        if not os.path.exists(path):
-            raise
-
-
 @contextlib.contextmanager
 def clean_path_on_failure(path: str) -> Generator[None, None, None]:
     """Cleans up the directory on an exceptional failure."""
@@ -49,11 +40,6 @@ def clean_path_on_failure(path: str) -> Generator[None, None, None]:
         raise


-@contextlib.contextmanager
-def noop_context() -> Generator[None, None, None]:
-    yield
-
-
 @contextlib.contextmanager
 def tmpdir() -> Generator[str, None, None]:
     """Contextmanager to create a temporary directory.  It will be cleaned up
@@ -76,9 +62,8 @@ def resource_text(filename: str) -> str:

 def make_executable(filename: str) -> None:
     original_mode = os.stat(filename).st_mode
-    os.chmod(
-        filename, original_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH,
-    )
+    new_mode = original_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
+    os.chmod(filename, new_mode)


 class CalledProcessError(RuntimeError):
@@ -105,40 +90,28 @@ class CalledProcessError(RuntimeError):
             return b' (none)'

         return b''.join((
-            'command: {!r}\n'
-            'return code: {}\n'
-            'expected return code: {}\n'.format(
-                self.cmd, self.returncode, self.expected_returncode,
-            ).encode('UTF-8'),
+            f'command: {self.cmd!r}\n'.encode(),
+            f'return code: {self.returncode}\n'.encode(),
+            f'expected return code: {self.expected_returncode}\n'.encode(),
             b'stdout:', _indent_or_none(self.stdout), b'\n',
             b'stderr:', _indent_or_none(self.stderr),
         ))

     def __str__(self) -> str:
-        return self.__bytes__().decode('UTF-8')
+        return self.__bytes__().decode()


-def _cmd_kwargs(
-    *cmd: str,
-    **kwargs: Any,
-) -> Tuple[Tuple[str, ...], Dict[str, Any]]:
-    # py2/py3 on windows are more strict about the types here
-    cmd = tuple(five.n(arg) for arg in cmd)
-    kwargs['env'] = {
-        five.n(key): five.n(value)
-        for key, value in kwargs.pop('env', {}).items()
-    } or None
+def _setdefault_kwargs(kwargs: Dict[str, Any]) -> None:
     for arg in ('stdin', 'stdout', 'stderr'):
         kwargs.setdefault(arg, subprocess.PIPE)
-    return cmd, kwargs


 def cmd_output_b(
     *cmd: str,
+    retcode: Optional[int] = 0,
     **kwargs: Any,
 ) -> Tuple[int, bytes, Optional[bytes]]:
-    retcode = kwargs.pop('retcode', 0)
-    cmd, kwargs = _cmd_kwargs(*cmd, **kwargs)
+    _setdefault_kwargs(kwargs)

     try:
         cmd = parse_shebang.normalize_cmd(cmd)
@@ -157,8 +130,8 @@ def cmd_output_b(

 def cmd_output(*cmd: str, **kwargs: Any) -> Tuple[int, str, Optional[str]]:
     returncode, stdout_b, stderr_b = cmd_output_b(*cmd, **kwargs)
-    stdout = stdout_b.decode('UTF-8') if stdout_b is not None else None
-    stderr = stderr_b.decode('UTF-8') if stderr_b is not None else None
+    stdout = stdout_b.decode() if stdout_b is not None else None
+    stderr = stderr_b.decode() if stderr_b is not None else None
     return returncode, stdout, stderr
@@ -203,11 +176,12 @@ if os.name != 'nt':  # pragma: windows no cover

     def cmd_output_p(
         *cmd: str,
+        retcode: Optional[int] = 0,
         **kwargs: Any,
     ) -> Tuple[int, bytes, Optional[bytes]]:
-        assert kwargs.pop('retcode') is None
+        assert retcode is None
         assert kwargs['stderr'] == subprocess.STDOUT, kwargs['stderr']
-        cmd, kwargs = _cmd_kwargs(*cmd, **kwargs)
+        _setdefault_kwargs(kwargs)

         try:
             cmd = parse_shebang.normalize_cmd(cmd)
@@ -49,7 +49,6 @@ def _command_length(*cmd: str) -> int:
     # win32 uses the amount of characters, more details at:
     # https://github.com/pre-commit/pre-commit/pull/839
     if sys.platform == 'win32':
-        # the python2.x apis require bytes, we encode as UTF-8
         return len(full_cmd.encode('utf-16le')) // 2
     else:
         return len(full_cmd.encode(sys.getfilesystemencoding()))
@@ -118,6 +117,10 @@ def _thread_mapper(maxsize: int) -> Generator[
 def xargs(
     cmd: Tuple[str, ...],
     varargs: Sequence[str],
+    *,
+    color: bool = False,
+    target_concurrency: int = 1,
+    _max_length: int = _get_platform_max_length(),
     **kwargs: Any,
 ) -> Tuple[int, bytes]:
     """A simplified implementation of xargs.
@@ -125,9 +128,6 @@ def xargs(
     color: Make a pty if on a platform that supports it
     target_concurrency: Target number of partitions to run concurrently
     """
-    color = kwargs.pop('color', False)
-    target_concurrency = kwargs.pop('target_concurrency', 1)
-    max_length = kwargs.pop('_max_length', _get_platform_max_length())
     cmd_fn = cmd_output_p if color else cmd_output_b
     retcode = 0
     stdout = b''
@@ -137,7 +137,7 @@ def xargs(
     except parse_shebang.ExecutableNotFoundError as e:
         return e.to_output()[:2]

-    partitions = partition(cmd, varargs, target_concurrency, max_length)
+    partitions = partition(cmd, varargs, target_concurrency, _max_length)

     def run_cmd_partition(
         run_cmd: Tuple[str, ...],
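The xargs signature change moves `color`, `target_concurrency`, and `_max_length` out of `**kwargs` into keyword-only parameters (everything after the bare `*`), so mistyped options fail at call time instead of being silently forwarded. A reduced before/after sketch with a stubbed body, not the real implementation:

from typing import Any, Sequence, Tuple


def xargs_old(cmd: Tuple[str, ...], varargs: Sequence[str], **kwargs: Any) -> None:
    # Old style: options are fished out of kwargs at runtime.
    color = kwargs.pop('color', False)
    target_concurrency = kwargs.pop('target_concurrency', 1)
    print(cmd, varargs, color, target_concurrency, kwargs)


def xargs_new(
        cmd: Tuple[str, ...],
        varargs: Sequence[str],
        *,
        color: bool = False,
        target_concurrency: int = 1,
) -> None:
    # New style: options are named, typed, keyword-only parameters.
    print(cmd, varargs, color, target_concurrency)


xargs_new(('echo',), ['a', 'b'], color=True)
# xargs_new(('echo',), ['a', 'b'], colour=True)  # would raise TypeError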
@@ -1,7 +1,7 @@
 #!/usr/bin/env bash
 # Intentionally write mixed encoding to the output.  This should not crash
 # pre-commit and should write bytes to the output.
-# '☃'.encode('UTF-8') + '²'.encode('latin1')
+# '☃'.encode() + '²'.encode('latin1')
 echo -e '\xe2\x98\x83\xb2'
 # exit 1 to trigger printing
 exit 1
@@ -1,13 +1,7 @@
-#!/usr/bin/env python
-import sys
-
-
-def main():
-    for i in range(6):
-        f = sys.stdout if i % 2 == 0 else sys.stderr
-        f.write(f'{i}\n')
-        f.flush()
-
-
-if __name__ == '__main__':
-    exit(main())
+#!/usr/bin/env bash
+echo 0
+echo 1 1>&2
+echo 2
+echo 3 1>&2
+echo 4
+echo 5 1>&2
@@ -1,12 +1,11 @@
-#!/usr/bin/env python
-import sys
-
-
-def main():
-    print('stdin: {}'.format(sys.stdin.isatty()))
-    print('stdout: {}'.format(sys.stdout.isatty()))
-    print('stderr: {}'.format(sys.stderr.isatty()))
-
-
-if __name__ == '__main__':
-    exit(main())
+#!/usr/bin/env bash
+t() {
+    if [ -t "$1" ]; then
+        echo "$2: True"
+    else
+        echo "$2: False"
+    fi
+}
+t 0 stdin
+t 1 stdout
+t 2 stderr
@@ -1,7 +1,6 @@
 import contextlib
 import os.path
 import subprocess
-import sys

 import pytest
@@ -18,13 +17,15 @@ def get_resource_path(path):
     return os.path.join(TESTING_DIR, 'resources', path)


-def cmd_output_mocked_pre_commit_home(*args, **kwargs):
-    # keyword-only argument
-    tempdir_factory = kwargs.pop('tempdir_factory')
-    pre_commit_home = kwargs.pop('pre_commit_home', tempdir_factory.get())
+def cmd_output_mocked_pre_commit_home(
+        *args, tempdir_factory, pre_commit_home=None, env=None, **kwargs,
+):
+    if pre_commit_home is None:
+        pre_commit_home = tempdir_factory.get()
+    env = env if env is not None else os.environ
     kwargs.setdefault('stderr', subprocess.STDOUT)
     # Don't want to write to the home directory
-    env = dict(kwargs.pop('env', os.environ), PRE_COMMIT_HOME=pre_commit_home)
+    env = dict(env, PRE_COMMIT_HOME=pre_commit_home)
     ret, out, _ = cmd_output(*args, env=env, **kwargs)
     return ret, out.replace('\r\n', '\n'), None
@@ -44,33 +45,6 @@ xfailif_windows_no_ruby = pytest.mark.xfail(
 xfailif_windows = pytest.mark.xfail(os.name == 'nt', reason='windows')


-def broken_deep_listdir():  # pragma: no cover (platform specific)
-    if sys.platform != 'win32':
-        return False
-    try:
-        os.listdir('\\\\?\\' + os.path.abspath('.'))
-    except OSError:
-        return True
-    try:
-        os.listdir(b'\\\\?\\C:' + b'\\' * 300)
-    except TypeError:
-        return True
-    except OSError:
-        return False
-
-
-xfailif_broken_deep_listdir = pytest.mark.xfail(
-    broken_deep_listdir(),
-    reason='Node on windows requires deep listdir',
-)
-
-
-xfailif_no_symlink = pytest.mark.xfail(
-    not hasattr(os, 'symlink'),
-    reason='Symlink is not supported on this platform',
-)
-
-
 def supports_venv():  # pragma: no cover (platform specific)
     try:
         __import__('ensurepip')
@@ -123,9 +97,7 @@ def cwd(path):
     os.chdir(original_cwd)


-def git_commit(*args, **kwargs):
-    fn = kwargs.pop('fn', cmd_output)
-    msg = kwargs.pop('msg', 'commit!')
+def git_commit(*args, fn=cmd_output, msg='commit!', **kwargs):
     kwargs.setdefault('stderr', subprocess.STDOUT)

     cmd = ('git', 'commit', '--allow-empty', '--no-gpg-sign', '-a') + args
@@ -291,13 +291,11 @@ def test_minimum_pre_commit_version_failing():
         cfg = {'repos': [], 'minimum_pre_commit_version': '999'}
         cfgv.validate(cfg, CONFIG_SCHEMA)
     assert str(excinfo.value) == (
-        '\n'
-        '==> At Config()\n'
-        '==> At key: minimum_pre_commit_version\n'
-        '=====> pre-commit version 999 is required but version {} is '
-        'installed. Perhaps run `pip install --upgrade pre-commit`.'.format(
-            C.VERSION,
-        )
+        f'\n'
+        f'==> At Config()\n'
+        f'==> At key: minimum_pre_commit_version\n'
+        f'=====> pre-commit version 999 is required but version {C.VERSION} '
+        f'is installed. Perhaps run `pip install --upgrade pre-commit`.'
     )
@@ -1,4 +1,4 @@
-import pipes
+import shlex

 import pytest
@@ -118,12 +118,12 @@ def test_rev_info_update_does_not_freeze_if_already_sha(out_of_date):

 def test_autoupdate_up_to_date_repo(up_to_date, tmpdir, store):
     contents = (
-        'repos:\n'
-        '- repo: {}\n'
-        ' rev: {}\n'
-        ' hooks:\n'
-        ' - id: foo\n'
-    ).format(up_to_date, git.head_rev(up_to_date))
+        f'repos:\n'
+        f'- repo: {up_to_date}\n'
+        f' rev: {git.head_rev(up_to_date)}\n'
+        f' hooks:\n'
+        f' - id: foo\n'
+    )
     cfg = tmpdir.join(C.CONFIG_FILE)
     cfg.write(contents)
@@ -278,7 +278,7 @@ def test_loses_formatting_when_not_detectable(out_of_date, store, tmpdir):
        ' ],\n'
        ' }}\n'
        ']\n'.format(
-            pipes.quote(out_of_date.path), out_of_date.original_rev,
+            shlex.quote(out_of_date.path), out_of_date.original_rev,
        )
    )
    cfg = tmpdir.join(C.CONFIG_FILE)
@@ -286,12 +286,12 @@ def test_loses_formatting_when_not_detectable(out_of_date, store, tmpdir):

    assert autoupdate(str(cfg), store, freeze=False, tags_only=False) == 0
    expected = (
-        'repos:\n'
-        '- repo: {}\n'
-        ' rev: {}\n'
-        ' hooks:\n'
-        ' - id: foo\n'
-    ).format(out_of_date.path, out_of_date.head_rev)
+        f'repos:\n'
+        f'- repo: {out_of_date.path}\n'
+        f' rev: {out_of_date.head_rev}\n'
+        f' hooks:\n'
+        f' - id: foo\n'
+    )
    assert cfg.read() == expected
@@ -358,12 +358,12 @@ def test_hook_disppearing_repo_raises(hook_disappearing, store):

 def test_autoupdate_hook_disappearing_repo(hook_disappearing, tmpdir, store):
     contents = (
-        'repos:\n'
-        '- repo: {}\n'
-        ' rev: {}\n'
-        ' hooks:\n'
-        ' - id: foo\n'
-    ).format(hook_disappearing.path, hook_disappearing.original_rev)
+        f'repos:\n'
+        f'- repo: {hook_disappearing.path}\n'
+        f' rev: {hook_disappearing.original_rev}\n'
+        f' hooks:\n'
+        f' - id: foo\n'
+    )
     cfg = tmpdir.join(C.CONFIG_FILE)
     cfg.write(contents)
@@ -14,7 +14,6 @@ from pre_commit.commands.install_uninstall import uninstall
 from pre_commit.parse_shebang import find_executable
 from pre_commit.util import cmd_output
 from pre_commit.util import make_executable
-from pre_commit.util import mkdirp
 from pre_commit.util import resource_text
 from testing.fixtures import git_dir
 from testing.fixtures import make_consuming_repo
@@ -22,7 +21,6 @@ from testing.fixtures import remove_config_from_repo
 from testing.util import cmd_output_mocked_pre_commit_home
 from testing.util import cwd
 from testing.util import git_commit
-from testing.util import xfailif_no_symlink
 from testing.util import xfailif_windows
@@ -52,11 +50,11 @@ def test_shebang_posix_not_on_path():

 def test_shebang_posix_on_path(tmpdir):
-    tmpdir.join('python{}'.format(sys.version_info[0])).ensure()
+    tmpdir.join(f'python{sys.version_info[0]}').ensure()

     with mock.patch.object(sys, 'platform', 'posix'):
         with mock.patch.object(os, 'defpath', tmpdir.strpath):
-            expected = '#!/usr/bin/env python{}'.format(sys.version_info[0])
+            expected = f'#!/usr/bin/env python{sys.version_info[0]}'
             assert shebang() == expected
@@ -90,7 +88,6 @@ def test_install_refuses_core_hookspath(in_git_dir, store):
     assert install(C.CONFIG_FILE, store, hook_types=['pre-commit'])


-@xfailif_no_symlink  # pragma: windows no cover
 def test_install_hooks_dead_symlink(in_git_dir, store):
     hook = in_git_dir.join('.git/hooks').ensure_dir().join('pre-commit')
     os.symlink('/fake/baz', hook.strpath)
@@ -307,7 +304,7 @@ EXISTING_COMMIT_RUN = re.compile(

 def _write_legacy_hook(path):
-    mkdirp(os.path.join(path, '.git/hooks'))
+    os.makedirs(os.path.join(path, '.git/hooks'), exist_ok=True)
     with open(os.path.join(path, '.git/hooks/pre-commit'), 'w') as f:
         f.write('#!/usr/bin/env bash\necho "legacy hook"\n')
     make_executable(f.name)
@@ -370,7 +367,7 @@ def test_failing_existing_hook_returns_1(tempdir_factory, store):
     path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
     with cwd(path):
         # Write out a failing "old" hook
-        mkdirp(os.path.join(path, '.git/hooks'))
+        os.makedirs(os.path.join(path, '.git/hooks'), exist_ok=True)
         with open(os.path.join(path, '.git/hooks/pre-commit'), 'w') as f:
             f.write('#!/usr/bin/env bash\necho "fail!"\nexit 1\n')
             make_executable(f.name)
@@ -432,7 +429,7 @@ def test_replace_old_commit_script(tempdir_factory, store):
             CURRENT_HASH, PRIOR_HASHES[-1],
         )

-        mkdirp(os.path.join(path, '.git/hooks'))
+        os.makedirs(os.path.join(path, '.git/hooks'), exist_ok=True)
         with open(os.path.join(path, '.git/hooks/pre-commit'), 'w') as f:
             f.write(new_contents)
             make_executable(f.name)
@@ -609,7 +606,7 @@ def test_pre_push_legacy(tempdir_factory, store):
     path = tempdir_factory.get()
     cmd_output('git', 'clone', upstream, path)
     with cwd(path):
-        mkdirp(os.path.join(path, '.git/hooks'))
+        os.makedirs(os.path.join(path, '.git/hooks'), exist_ok=True)
         with open(os.path.join(path, '.git/hooks/pre-push'), 'w') as f:
             f.write(
                 '#!/usr/bin/env bash\n'
@@ -658,7 +655,7 @@ def test_commit_msg_integration_passing(

 def test_commit_msg_legacy(commit_msg_repo, tempdir_factory, store):
     hook_path = os.path.join(commit_msg_repo, '.git/hooks/commit-msg')
-    mkdirp(os.path.dirname(hook_path))
+    os.makedirs(os.path.dirname(hook_path), exist_ok=True)
     with open(hook_path, 'w') as hook_file:
         hook_file.write(
             '#!/usr/bin/env bash\n'
@@ -713,7 +710,7 @@ def test_prepare_commit_msg_legacy(
     hook_path = os.path.join(
         prepare_commit_msg_repo, '.git/hooks/prepare-commit-msg',
     )
-    mkdirp(os.path.dirname(hook_path))
+    os.makedirs(os.path.dirname(hook_path), exist_ok=True)
     with open(hook_path, 'w') as hook_file:
         hook_file.write(
             '#!/usr/bin/env bash\n'
@@ -1,5 +1,5 @@
 import os.path
-import pipes
+import shlex
 import sys
 import time
 from unittest import mock
@@ -27,7 +27,6 @@ from testing.util import cmd_output_mocked_pre_commit_home
 from testing.util import cwd
 from testing.util import git_commit
 from testing.util import run_opts
-from testing.util import xfailif_no_symlink


 @pytest.fixture
@@ -580,8 +579,7 @@ def test_lots_of_files(store, tempdir_factory):

     # Write a crap ton of files
     for i in range(400):
-        filename = '{}{}'.format('a' * 100, i)
-        open(filename, 'w').close()
+        open(f'{"a" * 100}{i}', 'w').close()

     cmd_output('git', 'add', '.')
     install(C.CONFIG_FILE, store, hook_types=['pre-commit'])
@@ -673,7 +671,7 @@ def test_local_hook_passes(cap_out, store, repo_with_passing_hook):
         'id': 'identity-copy',
         'name': 'identity-copy',
         'entry': '{} -m pre_commit.meta_hooks.identity'.format(
-            pipes.quote(sys.executable),
+            shlex.quote(sys.executable),
         ),
         'language': 'system',
         'files': r'\.py$',
@@ -862,7 +860,6 @@ def test_include_exclude_base_case(some_filenames):
     ]


-@xfailif_no_symlink  # pragma: windows no cover
 def test_matches_broken_symlink(tmpdir):
     with tmpdir.as_cwd():
         os.symlink('does-not-exist', 'link')
@@ -893,7 +890,7 @@ def test_args_hook_only(cap_out, store, repo_with_passing_hook):
         'id': 'identity-copy',
         'name': 'identity-copy',
         'entry': '{} -m pre_commit.meta_hooks.identity'.format(
-            pipes.quote(sys.executable),
+            shlex.quote(sys.executable),
        ),
         'language': 'system',
         'files': r'\.py$',
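The `pipes.quote` to `shlex.quote` swaps throughout this commit move to the documented Python 3 spelling of shell quoting (the `pipes` module has since been deprecated). A tiny sketch with a made-up interpreter path:

import shlex

executable = '/path with spaces/python3'
entry = f'{shlex.quote(executable)} -m pre_commit.meta_hooks.identity'

# Quoting keeps the interpreter path a single shell word.
assert entry.startswith("'/path with spaces/python3'")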
@@ -249,17 +249,16 @@ class Fixture:
     def get(self):
         """Get the output assuming it was written as UTF-8 bytes"""
-        return self.get_bytes().decode('UTF-8')
+        return self.get_bytes().decode()


 @pytest.fixture
 def cap_out():
     stream = FakeStream()
     write = functools.partial(output.write, stream=stream)
-    write_line = functools.partial(output.write_line, stream=stream)
-    with mock.patch.object(output, 'write', write):
-        with mock.patch.object(output, 'write_line', write_line):
-            yield Fixture(stream)
+    write_line_b = functools.partial(output.write_line_b, stream=stream)
+    with mock.patch.multiple(output, write=write, write_line_b=write_line_b):
+        yield Fixture(stream)


 @pytest.fixture
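The conftest fixture collapses two nested `mock.patch.object` calls into one `mock.patch.multiple`, which patches several attributes on the same target and restores them together. A minimal sketch against a throwaway namespace object rather than pre-commit's real output module:

import types
from unittest import mock

ns = types.SimpleNamespace(write=print, write_line_b=print)

with mock.patch.multiple(ns, write=lambda s: None, write_line_b=lambda s: None):
    ns.write('swallowed')          # patched: does nothing
    ns.write_line_b(b'swallowed')  # patched: does nothing

assert ns.write is print  # originals restored on exit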
@@ -8,12 +8,7 @@ from pre_commit.envcontext import UNSET
 from pre_commit.envcontext import Var


-def _test(**kwargs):
-    before = kwargs.pop('before')
-    patch = kwargs.pop('patch')
-    expected = kwargs.pop('expected')
-    assert not kwargs
-
+def _test(*, before, patch, expected):
     env = before.copy()
     with envcontext(patch, _env=env):
         assert env == expected
@@ -99,9 +99,7 @@ def test_log_and_exit(cap_out, mock_store_dir):

     printed = cap_out.get()
     log_file = os.path.join(mock_store_dir, 'pre-commit.log')
-    assert printed == (
-        'msg: FatalError: hai\n' 'Check the log at {}\n'.format(log_file)
-    )
+    assert printed == f'msg: FatalError: hai\nCheck the log at {log_file}\n'

     assert os.path.exists(log_file)
     with open(log_file) as f:
@@ -1,7 +1,6 @@
 import os.path
-import pipes

-from pre_commit.languages.ruby import _install_rbenv
+from pre_commit.languages import ruby
 from pre_commit.prefix import Prefix
 from pre_commit.util import cmd_output
 from testing.util import xfailif_windows_no_ruby
@@ -10,31 +9,20 @@ from testing.util import xfailif_windows_no_ruby
 @xfailif_windows_no_ruby
 def test_install_rbenv(tempdir_factory):
     prefix = Prefix(tempdir_factory.get())
-    _install_rbenv(prefix)
+    ruby._install_rbenv(prefix)
     # Should have created rbenv directory
     assert os.path.exists(prefix.path('rbenv-default'))
-    # We should have created our `activate` script
-    activate_path = prefix.path('rbenv-default', 'bin', 'activate')
-    assert os.path.exists(activate_path)

     # Should be able to activate using our script and access rbenv
-    cmd_output(
-        'bash', '-c',
-        '. {} && rbenv --help'.format(
-            pipes.quote(prefix.path('rbenv-default', 'bin', 'activate')),
-        ),
-    )
+    with ruby.in_env(prefix, 'default'):
+        cmd_output('rbenv', '--help')


 @xfailif_windows_no_ruby
 def test_install_rbenv_with_version(tempdir_factory):
     prefix = Prefix(tempdir_factory.get())
-    _install_rbenv(prefix, version='1.9.3p547')
+    ruby._install_rbenv(prefix, version='1.9.3p547')

     # Should be able to activate and use rbenv install
-    cmd_output(
-        'bash', '-c',
-        '. {} && rbenv install --help'.format(
-            pipes.quote(prefix.path('rbenv-1.9.3p547', 'bin', 'activate')),
-        ),
-    )
+    with ruby.in_env(prefix, '1.9.3p547'):
+        cmd_output('rbenv', 'install', '--help')
@@ -23,7 +23,7 @@ def test_file_doesnt_exist():

 def test_simple_case(tmpdir):
     x = tmpdir.join('f')
-    x.write_text('#!/usr/bin/env echo', encoding='UTF-8')
+    x.write('#!/usr/bin/env echo')
     make_executable(x.strpath)
     assert parse_shebang.parse_filename(x.strpath) == ('echo',)
@@ -10,7 +10,6 @@ import cfgv
 import pytest

 import pre_commit.constants as C
-from pre_commit import five
 from pre_commit.clientlib import CONFIG_SCHEMA
 from pre_commit.clientlib import load_manifest
 from pre_commit.envcontext import envcontext
@@ -33,7 +32,6 @@ from testing.util import cwd
 from testing.util import get_resource_path
 from testing.util import skipif_cant_run_docker
 from testing.util import skipif_cant_run_swift
-from testing.util import xfailif_broken_deep_listdir
 from testing.util import xfailif_no_venv
 from testing.util import xfailif_windows_no_ruby
@@ -119,7 +117,7 @@ def test_python_hook(tempdir_factory, store):
     _test_hook_repo(
         tempdir_factory, store, 'python_hooks_repo',
         'foo', [os.devnull],
-        b"['" + five.to_bytes(os.devnull) + b"']\nHello World\n",
+        f'[{os.devnull!r}]\nHello World\n'.encode(),
     )
@@ -154,7 +152,7 @@ def test_python_hook_weird_setup_cfg(in_git_dir, tempdir_factory, store):
     _test_hook_repo(
         tempdir_factory, store, 'python_hooks_repo',
         'foo', [os.devnull],
-        b"['" + five.to_bytes(os.devnull) + b"']\nHello World\n",
+        f'[{os.devnull!r}]\nHello World\n'.encode(),
     )
@@ -163,7 +161,7 @@ def test_python_venv(tempdir_factory, store):  # pragma: no cover (no venv)
     _test_hook_repo(
         tempdir_factory, store, 'python_venv_hooks_repo',
         'foo', [os.devnull],
-        b"['" + five.to_bytes(os.devnull) + b"']\nHello World\n",
+        f'[{os.devnull!r}]\nHello World\n'.encode(),
     )
@@ -188,7 +186,7 @@ def test_versioned_python_hook(tempdir_factory, store):
         tempdir_factory, store, 'python3_hooks_repo',
         'python3-hook',
         [os.devnull],
-        b"3\n['" + five.to_bytes(os.devnull) + b"']\nHello World\n",
+        f'3\n[{os.devnull!r}]\nHello World\n'.encode(),
     )
@@ -231,7 +229,6 @@ def test_run_a_docker_image_hook(tempdir_factory, store, hook_id):
     )


-@xfailif_broken_deep_listdir
 def test_run_a_node_hook(tempdir_factory, store):
     _test_hook_repo(
         tempdir_factory, store, 'node_hooks_repo',
@@ -239,7 +236,6 @@ def test_run_a_node_hook(tempdir_factory, store):
     )


-@xfailif_broken_deep_listdir
 def test_run_versioned_node_hook(tempdir_factory, store):
     _test_hook_repo(
         tempdir_factory, store, 'node_versioned_hooks_repo',
@@ -522,7 +518,6 @@ def test_additional_ruby_dependencies_installed(tempdir_factory, store):
     assert 'tins' in output


-@xfailif_broken_deep_listdir  # pragma: windows no cover
 def test_additional_node_dependencies_installed(tempdir_factory, store):
     path = make_repo(tempdir_factory, 'node_hooks_repo')
     config = make_config_from_repo(path)
@@ -805,9 +800,9 @@ def test_hook_id_not_present(tempdir_factory, store, fake_log_handler):
     with pytest.raises(SystemExit):
         _get_hook(config, store, 'i-dont-exist')
     assert fake_log_handler.handle.call_args[0][0].msg == (
-        '`i-dont-exist` is not present in repository file://{}. '
-        'Typo? Perhaps it is introduced in a newer version? '
-        'Often `pre-commit autoupdate` fixes this.'.format(path)
+        f'`i-dont-exist` is not present in repository file://{path}. '
+        f'Typo? Perhaps it is introduced in a newer version? '
+        f'Often `pre-commit autoupdate` fixes this.'
    )