Mirror of https://github.com/pre-commit/pre-commit.git (synced 2026-02-17 08:14:42 +04:00)

Merge pull request #1277 from pre-commit/pyupgrade
Drop python 2 / python 3.5 support in pre-commit

Commit b2faf339ce: 101 changed files with 1280 additions and 1040 deletions.

Excerpts from the diff follow; where a line was modified, the old version is shown
immediately above its replacement.
@@ -25,6 +25,10 @@ exclude_lines =
    ^\s*return NotImplemented\b
    ^\s*raise$

    # Ignore typing-related things
    ^if (False|TYPE_CHECKING):
    : \.\.\.$

    # Don't complain if non-runnable code isn't run:
    ^if __name__ == ['"]__main__['"]:$
.gitignore (vendored, 16 changed lines)

@@ -1,14 +1,8 @@
*.egg-info
*.iml
*.py[co]
.*.sw[a-z]
.coverage
.idea
.project
.pydevproject
.tox
.venv.touch
/.coverage
/.mypy_cache
/.pytest_cache
/.tox
/dist
/venv*
coverage-html
dist
.pytest_cache
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
  rev: v2.1.0
  rev: v2.4.0
  hooks:
  - id: trailing-whitespace
  - id: end-of-file-fixer

@@ -12,30 +12,41 @@ repos:
  - id: requirements-txt-fixer
  - id: double-quote-string-fixer
- repo: https://gitlab.com/pycqa/flake8
  rev: 3.7.7
  rev: 3.7.9
  hooks:
  - id: flake8
- repo: https://github.com/pre-commit/mirrors-autopep8
  rev: v1.4.3
  rev: v1.4.4
  hooks:
  - id: autopep8
- repo: https://github.com/pre-commit/pre-commit
  rev: v1.14.4
  rev: v1.21.0
  hooks:
  - id: validate_manifest
- repo: https://github.com/asottile/pyupgrade
  rev: v1.12.0
  rev: v1.25.3
  hooks:
  - id: pyupgrade
    args: [--py36-plus]
- repo: https://github.com/asottile/reorder_python_imports
  rev: v1.4.0
  rev: v1.9.0
  hooks:
  - id: reorder-python-imports
    language_version: python3
    args: [--py3-plus]
- repo: https://github.com/asottile/add-trailing-comma
  rev: v1.0.0
  rev: v1.5.0
  hooks:
  - id: add-trailing-comma
    args: [--py36-plus]
- repo: https://github.com/asottile/setup-cfg-fmt
  rev: v1.6.0
  hooks:
  - id: setup-cfg-fmt
- repo: https://github.com/pre-commit/mirrors-mypy
  rev: v0.761
  hooks:
  - id: mypy
    exclude: ^testing/resources/
- repo: meta
  hooks:
  - id: check-hooks-apply
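
The pyupgrade hook bumped above (run with --py36-plus) drives many of the mechanical
rewrites in the rest of this diff. A rough, illustrative sketch of that kind of change,
using an invented Widget class rather than anything from pre-commit:

# Before (Python 2 compatible style):
#     from __future__ import unicode_literals
#
#     class Widget(object):
#         def describe(self, name):
#             return 'widget: {}'.format(name)

# After (the style this diff moves to: no explicit object base class, f-strings):
class Widget:
    def describe(self, name):
        return f'widget: {name}'

print(Widget().describe('example'))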
@@ -10,18 +10,17 @@ resources:
    type: github
    endpoint: github
    name: asottile/azure-pipeline-templates
    ref: refs/tags/v0.0.15
    ref: refs/tags/v1.0.0

jobs:
- template: job--pre-commit.yml@asottile
- template: job--python-tox.yml@asottile
  parameters:
    toxenvs: [py27, py37]
    toxenvs: [py37]
    os: windows
    additional_variables:
      COVERAGE_IGNORE_WINDOWS: '# pragma: windows no cover'
      TOX_TESTENV_PASSENV: COVERAGE_IGNORE_WINDOWS
      TEMP: C:\Temp  # remove when dropping python2
    pre_test:
    - powershell: Write-Host "##vso[task.prependpath]$env:CONDA\Scripts"
      displayName: Add conda to PATH

@@ -39,7 +38,7 @@ jobs:
      displayName: install swift
- template: job--python-tox.yml@asottile
  parameters:
    toxenvs: [pypy, pypy3, py27, py36, py37, py38]
    toxenvs: [pypy3, py36, py37, py38]
    os: linux
    pre_test:
    - task: UseRubyVersion@0
@@ -1,5 +1,3 @@
from __future__ import absolute_import

from pre_commit.main import main
@@ -1,11 +1,12 @@
from __future__ import absolute_import
from __future__ import unicode_literals

import argparse
import functools
import logging
import pipes
import sys
from typing import Any
from typing import Dict
from typing import Optional
from typing import Sequence

import cfgv
from aspy.yaml import ordered_load

@@ -21,7 +22,7 @@ logger = logging.getLogger('pre_commit')
check_string_regex = cfgv.check_and(cfgv.check_string, cfgv.check_regex)


def check_type_tag(tag):
def check_type_tag(tag: str) -> None:
    if tag not in ALL_TAGS:
        raise cfgv.ValidationError(
            'Type tag {!r} is not recognized. '

@@ -29,7 +30,7 @@ def check_type_tag(tag):
    )


def check_min_version(version):
def check_min_version(version: str) -> None:
    if parse_version(version) > parse_version(C.VERSION):
        raise cfgv.ValidationError(
            'pre-commit version {} is required but version {} is installed. '

@@ -39,7 +40,7 @@ def check_min_version(version):
    )


def _make_argparser(filenames_help):
def _make_argparser(filenames_help: str) -> argparse.ArgumentParser:
    parser = argparse.ArgumentParser()
    parser.add_argument('filenames', nargs='*', help=filenames_help)
    parser.add_argument('-V', '--version', action='version', version=C.VERSION)

@@ -89,7 +90,7 @@ load_manifest = functools.partial(
)


def validate_manifest_main(argv=None):
def validate_manifest_main(argv: Optional[Sequence[str]] = None) -> int:
    parser = _make_argparser('Manifest filenames.')
    args = parser.parse_args(argv)
    ret = 0

@@ -106,11 +107,11 @@ LOCAL = 'local'
META = 'meta'


class MigrateShaToRev(object):
class MigrateShaToRev:
    key = 'rev'

    @staticmethod
    def _cond(key):
    def _cond(key: str) -> cfgv.Conditional:
        return cfgv.Conditional(
            key, cfgv.check_string,
            condition_key='repo',

@@ -118,7 +119,7 @@ class MigrateShaToRev(object):
            ensure_absent=True,
        )

    def check(self, dct):
    def check(self, dct: Dict[str, Any]) -> None:
        if dct.get('repo') in {LOCAL, META}:
            self._cond('rev').check(dct)
            self._cond('sha').check(dct)

@@ -129,14 +130,14 @@ class MigrateShaToRev(object):
        else:
            self._cond('rev').check(dct)

    def apply_default(self, dct):
    def apply_default(self, dct: Dict[str, Any]) -> None:
        if 'sha' in dct:
            dct['rev'] = dct.pop('sha')

    remove_default = cfgv.Required.remove_default


def _entry(modname):
def _entry(modname: str) -> str:
    """the hook `entry` is passed through `shlex.split()` by the command
    runner, so to prevent issues with spaces and backslashes (on Windows)
    it must be quoted here.

@@ -146,13 +147,21 @@ def _entry(modname):
    )


def warn_unknown_keys_root(extra, orig_keys, dct):
def warn_unknown_keys_root(
    extra: Sequence[str],
    orig_keys: Sequence[str],
    dct: Dict[str, str],
) -> None:
    logger.warning(
        'Unexpected key(s) present at root: {}'.format(', '.join(extra)),
    )


def warn_unknown_keys_repo(extra, orig_keys, dct):
def warn_unknown_keys_repo(
    extra: Sequence[str],
    orig_keys: Sequence[str],
    dct: Dict[str, str],
) -> None:
    logger.warning(
        'Unexpected key(s) present on {}: {}'.format(
            dct['repo'], ', '.join(extra),

@@ -202,7 +211,7 @@ META_HOOK_DICT = cfgv.Map(
        if item.key in {'name', 'language', 'entry'} else
        item
        for item in MANIFEST_HOOK_DICT.items
    ])
    ]),
)
CONFIG_HOOK_DICT = cfgv.Map(
    'Hook', 'id',

@@ -217,7 +226,7 @@ CONFIG_HOOK_DICT = cfgv.Map(
        cfgv.OptionalNoDefault(item.key, item.check_fn)
        for item in MANIFEST_HOOK_DICT.items
        if item.key != 'id'
    ]
    ],
)
CONFIG_REPO_DICT = cfgv.Map(
    'Repository', 'repo',

@@ -243,7 +252,7 @@ CONFIG_REPO_DICT = cfgv.Map(
DEFAULT_LANGUAGE_VERSION = cfgv.Map(
    'DefaultLanguageVersion', None,
    cfgv.NoAdditionalKeys(all_languages),
    *[cfgv.Optional(x, cfgv.check_string, C.DEFAULT) for x in all_languages]
    *[cfgv.Optional(x, cfgv.check_string, C.DEFAULT) for x in all_languages],
)
CONFIG_SCHEMA = cfgv.Map(
    'Config', None,

@@ -284,7 +293,7 @@ class InvalidConfigError(FatalError):
    pass


def ordered_load_normalize_legacy_config(contents):
def ordered_load_normalize_legacy_config(contents: str) -> Dict[str, Any]:
    data = ordered_load(contents)
    if isinstance(data, list):
        # TODO: Once happy, issue a deprecation warning and instructions

@@ -301,7 +310,7 @@ load_config = functools.partial(
)


def validate_config_main(argv=None):
def validate_config_main(argv: Optional[Sequence[str]] = None) -> int:
    parser = _make_argparser('Config filenames.')
    args = parser.parse_args(argv)
    ret = 0
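
validate_manifest_main and validate_config_main above pick up the typed CLI entry-point
signature used throughout this diff. A minimal, self-contained sketch of the same pattern
(the argument names here are illustrative, not pre-commit's):

import argparse
from typing import Optional, Sequence


def main(argv: Optional[Sequence[str]] = None) -> int:
    parser = argparse.ArgumentParser()
    parser.add_argument('filenames', nargs='*')
    # passing None makes argparse fall back to sys.argv[1:]
    args = parser.parse_args(argv)
    return 0 if args.filenames else 1


if __name__ == '__main__':
    raise SystemExit(main())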
@@ -1,14 +1,12 @@
from __future__ import unicode_literals

import os
import sys

terminal_supports_color = True
if os.name == 'nt':  # pragma: no cover (windows)
if sys.platform == 'win32':  # pragma: no cover (windows)
    from pre_commit.color_windows import enable_virtual_terminal_processing
    try:
        enable_virtual_terminal_processing()
    except WindowsError:
    except OSError:
        terminal_supports_color = False

RED = '\033[41m'

@@ -23,7 +21,7 @@ class InvalidColorSetting(ValueError):
    pass


def format_color(text, color, use_color_setting):
def format_color(text: str, color: str, use_color_setting: bool) -> str:
    """Format text with color.

    Args:

@@ -34,13 +32,13 @@ def format_color(text, color, use_color_setting):
    if not use_color_setting:
        return text
    else:
        return '{}{}{}'.format(color, text, NORMAL)
        return f'{color}{text}{NORMAL}'


COLOR_CHOICES = ('auto', 'always', 'never')


def use_color(setting):
def use_color(setting: str) -> bool:
    """Choose whether to use color based on the command argument.

    Args:
@@ -1,13 +1,14 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import sys
assert sys.platform == 'win32'

from ctypes import POINTER  # noqa: E402
from ctypes import windll  # noqa: E402
from ctypes import WinError  # noqa: E402
from ctypes import WINFUNCTYPE  # noqa: E402
from ctypes.wintypes import BOOL  # noqa: E402
from ctypes.wintypes import DWORD  # noqa: E402
from ctypes.wintypes import HANDLE  # noqa: E402

from ctypes import POINTER
from ctypes import windll
from ctypes import WinError
from ctypes import WINFUNCTYPE
from ctypes.wintypes import BOOL
from ctypes.wintypes import DWORD
from ctypes.wintypes import HANDLE

STD_OUTPUT_HANDLE = -11
ENABLE_VIRTUAL_TERMINAL_PROCESSING = 4
@@ -1,11 +1,13 @@
from __future__ import print_function
from __future__ import unicode_literals

import collections
import os.path
import re
from typing import Any
from typing import Dict
from typing import List
from typing import NamedTuple
from typing import Optional
from typing import Sequence
from typing import Tuple

import six
from aspy.yaml import ordered_dump
from aspy.yaml import ordered_load

@@ -18,20 +20,23 @@ from pre_commit.clientlib import load_manifest
from pre_commit.clientlib import LOCAL
from pre_commit.clientlib import META
from pre_commit.commands.migrate_config import migrate_config
from pre_commit.store import Store
from pre_commit.util import CalledProcessError
from pre_commit.util import cmd_output
from pre_commit.util import cmd_output_b
from pre_commit.util import tmpdir


class RevInfo(collections.namedtuple('RevInfo', ('repo', 'rev', 'frozen'))):
    __slots__ = ()
class RevInfo(NamedTuple):
    repo: str
    rev: str
    frozen: Optional[str]

    @classmethod
    def from_config(cls, config):
    def from_config(cls, config: Dict[str, Any]) -> 'RevInfo':
        return cls(config['repo'], config['rev'], None)

    def update(self, tags_only, freeze):
    def update(self, tags_only: bool, freeze: bool) -> 'RevInfo':
        if tags_only:
            tag_cmd = ('git', 'describe', 'FETCH_HEAD', '--tags', '--abbrev=0')
        else:

@@ -59,12 +64,16 @@ class RepositoryCannotBeUpdatedError(RuntimeError):
    pass


def _check_hooks_still_exist_at_rev(repo_config, info, store):
def _check_hooks_still_exist_at_rev(
    repo_config: Dict[str, Any],
    info: RevInfo,
    store: Store,
) -> None:
    try:
        path = store.clone(repo_config['repo'], info.rev)
        manifest = load_manifest(os.path.join(path, C.MANIFEST_FILE))
    except InvalidManifestError as e:
        raise RepositoryCannotBeUpdatedError(six.text_type(e))
        raise RepositoryCannotBeUpdatedError(str(e))

    # See if any of our hooks were deleted with the new commits
    hooks = {hook['id'] for hook in repo_config['hooks']}

@@ -80,7 +89,11 @@ REV_LINE_RE = re.compile(r'^(\s+)rev:(\s*)([^\s#]+)(.*)(\r?\n)$', re.DOTALL)
REV_LINE_FMT = '{}rev:{}{}{}{}'


def _original_lines(path, rev_infos, retry=False):
def _original_lines(
    path: str,
    rev_infos: List[Optional[RevInfo]],
    retry: bool = False,
) -> Tuple[List[str], List[int]]:
    """detect `rev:` lines or reformat the file"""
    with open(path) as f:
        original = f.read()

@@ -97,7 +110,7 @@ def _original_lines(path, rev_infos, retry=False):
        return _original_lines(path, rev_infos, retry=True)


def _write_new_config(path, rev_infos):
def _write_new_config(path: str, rev_infos: List[Optional[RevInfo]]) -> None:
    lines, idxs = _original_lines(path, rev_infos)

    for idx, rev_info in zip(idxs, rev_infos):

@@ -108,7 +121,7 @@ def _write_new_config(path, rev_infos):
        new_rev_s = ordered_dump({'rev': rev_info.rev}, **C.YAML_DUMP_KWARGS)
        new_rev = new_rev_s.split(':', 1)[1].strip()
        if rev_info.frozen is not None:
            comment = ' # frozen: {}'.format(rev_info.frozen)
            comment = f' # frozen: {rev_info.frozen}'
        elif match.group(4).strip().startswith('# frozen:'):
            comment = ''
        else:

@@ -121,11 +134,17 @@ def _write_new_config(path, rev_infos):
        f.write(''.join(lines))


def autoupdate(config_file, store, tags_only, freeze, repos=()):
def autoupdate(
    config_file: str,
    store: Store,
    tags_only: bool,
    freeze: bool,
    repos: Sequence[str] = (),
) -> int:
    """Auto-update the pre-commit config to the latest versions of repos."""
    migrate_config(config_file, quiet=True)
    retv = 0
    rev_infos = []
    rev_infos: List[Optional[RevInfo]] = []
    changed = False

    config = load_config(config_file)

@@ -138,7 +157,7 @@ def autoupdate(config_file, store, tags_only, freeze, repos=()):
            rev_infos.append(None)
            continue

        output.write('Updating {} ... '.format(info.repo))
        output.write(f'Updating {info.repo} ... ')
        new_info = info.update(tags_only=tags_only, freeze=freeze)
        try:
            _check_hooks_still_exist_at_rev(repo_config, new_info, store)

@@ -151,10 +170,10 @@ def autoupdate(config_file, store, tags_only, freeze, repos=()):
        if new_info.rev != info.rev:
            changed = True
            if new_info.frozen:
                updated_to = '{} (frozen)'.format(new_info.frozen)
                updated_to = f'{new_info.frozen} (frozen)'
            else:
                updated_to = new_info.rev
            msg = 'updating {} -> {}.'.format(info.rev, updated_to)
            msg = f'updating {info.rev} -> {updated_to}.'
            output.write_line(msg)
            rev_infos.append(new_info)
        else:
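
RevInfo above is converted from collections.namedtuple to typing.NamedTuple, which keeps
the tuple behaviour while adding field types. A small standalone sketch of the conversion
(the values are made up for the example):

from typing import NamedTuple, Optional


# old style, no field types:
#     RevInfo = collections.namedtuple('RevInfo', ('repo', 'rev', 'frozen'))

class RevInfo(NamedTuple):
    repo: str
    rev: str
    frozen: Optional[str]


info = RevInfo(repo='https://example.com/repo', rev='v1.0.0', frozen=None)
print(info._replace(rev='v2.0.0'))  # the namedtuple API still works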
@@ -1,16 +1,14 @@
from __future__ import print_function
from __future__ import unicode_literals

import os.path

from pre_commit import output
from pre_commit.store import Store
from pre_commit.util import rmtree


def clean(store):
def clean(store: Store) -> int:
    legacy_path = os.path.expanduser('~/.pre-commit')
    for directory in (store.directory, legacy_path):
        if os.path.exists(directory):
            rmtree(directory)
            output.write_line('Cleaned {}.'.format(directory))
            output.write_line(f'Cleaned {directory}.')
    return 0
@@ -1,7 +1,8 @@
from __future__ import absolute_import
from __future__ import unicode_literals

import os.path
from typing import Any
from typing import Dict
from typing import Set
from typing import Tuple

import pre_commit.constants as C
from pre_commit import output

@@ -11,9 +12,15 @@ from pre_commit.clientlib import load_config
from pre_commit.clientlib import load_manifest
from pre_commit.clientlib import LOCAL
from pre_commit.clientlib import META
from pre_commit.store import Store


def _mark_used_repos(store, all_repos, unused_repos, repo):
def _mark_used_repos(
    store: Store,
    all_repos: Dict[Tuple[str, str], str],
    unused_repos: Set[Tuple[str, str]],
    repo: Dict[str, Any],
) -> None:
    if repo['repo'] == META:
        return
    elif repo['repo'] == LOCAL:

@@ -50,7 +57,7 @@ def _mark_used_repos(store, all_repos, unused_repos, repo):
        ))


def _gc_repos(store):
def _gc_repos(store: Store) -> int:
    configs = store.select_all_configs()
    repos = store.select_all_repos()

@@ -76,8 +83,8 @@ def _gc_repos(store):
    return len(unused_repos)


def gc(store):
def gc(store: Store) -> int:
    with store.exclusive_lock():
        repos_removed = _gc_repos(store)
        output.write_line('{} repo(s) removed.'.format(repos_removed))
        output.write_line(f'{repos_removed} repo(s) removed.')
    return 0
@@ -1,14 +1,21 @@
import logging
import os.path
from typing import Sequence

from pre_commit.commands.install_uninstall import install
from pre_commit.store import Store
from pre_commit.util import CalledProcessError
from pre_commit.util import cmd_output

logger = logging.getLogger('pre_commit')


def init_templatedir(config_file, store, directory, hook_types):
def init_templatedir(
    config_file: str,
    store: Store,
    directory: str,
    hook_types: Sequence[str],
) -> int:
    install(
        config_file, store, hook_types=hook_types,
        overwrite=True, skip_on_missing_config=True, git_dir=directory,

@@ -23,5 +30,6 @@ def init_templatedir(config_file, store, directory, hook_types):
    if configured_path != dest:
        logger.warning('`init.templateDir` not set to the target directory')
        logger.warning(
            'maybe `git config --global init.templateDir {}`?'.format(dest),
            f'maybe `git config --global init.templateDir {dest}`?',
        )
    return 0
@@ -1,18 +1,18 @@
from __future__ import print_function
from __future__ import unicode_literals

import io
import itertools
import logging
import os.path
import shutil
import sys
from typing import Optional
from typing import Sequence
from typing import Tuple

from pre_commit import git
from pre_commit import output
from pre_commit.clientlib import load_config
from pre_commit.repository import all_hooks
from pre_commit.repository import install_hook_envs
from pre_commit.store import Store
from pre_commit.util import make_executable
from pre_commit.util import mkdirp
from pre_commit.util import resource_text

@@ -33,21 +33,24 @@ TEMPLATE_START = '# start templated\n'
TEMPLATE_END = '# end templated\n'


def _hook_paths(hook_type, git_dir=None):
def _hook_paths(
    hook_type: str,
    git_dir: Optional[str] = None,
) -> Tuple[str, str]:
    git_dir = git_dir if git_dir is not None else git.get_git_dir()
    pth = os.path.join(git_dir, 'hooks', hook_type)
    return pth, '{}.legacy'.format(pth)
    return pth, f'{pth}.legacy'


def is_our_script(filename):
def is_our_script(filename: str) -> bool:
    if not os.path.exists(filename):  # pragma: windows no cover (symlink)
        return False
    with io.open(filename) as f:
    with open(filename) as f:
        contents = f.read()
    return any(h in contents for h in (CURRENT_HASH,) + PRIOR_HASHES)


def shebang():
def shebang() -> str:
    if sys.platform == 'win32':
        py = 'python'
    else:

@@ -63,13 +66,16 @@ def shebang():
            break
        else:
            py = 'python'
    return '#!/usr/bin/env {}'.format(py)
    return f'#!/usr/bin/env {py}'


def _install_hook_script(
    config_file, hook_type,
    overwrite=False, skip_on_missing_config=False, git_dir=None,
):
    config_file: str,
    hook_type: str,
    overwrite: bool = False,
    skip_on_missing_config: bool = False,
    git_dir: Optional[str] = None,
) -> None:
    hook_path, legacy_path = _hook_paths(hook_type, git_dir=git_dir)

    mkdirp(os.path.dirname(hook_path))

@@ -94,7 +100,7 @@ def _install_hook_script(
        'SKIP_ON_MISSING_CONFIG': skip_on_missing_config,
    }

    with io.open(hook_path, 'w') as hook_file:
    with open(hook_path, 'w') as hook_file:
        contents = resource_text('hook-tmpl')
        before, rest = contents.split(TEMPLATE_START)
        to_template, after = rest.split(TEMPLATE_END)

@@ -108,14 +114,18 @@ def _install_hook_script(
        hook_file.write(TEMPLATE_END + after)
    make_executable(hook_path)

    output.write_line('pre-commit installed at {}'.format(hook_path))
    output.write_line(f'pre-commit installed at {hook_path}')


def install(
    config_file, store, hook_types,
    overwrite=False, hooks=False,
    skip_on_missing_config=False, git_dir=None,
):
    config_file: str,
    store: Store,
    hook_types: Sequence[str],
    overwrite: bool = False,
    hooks: bool = False,
    skip_on_missing_config: bool = False,
    git_dir: Optional[str] = None,
) -> int:
    if git.has_core_hookpaths_set():
        logger.error(
            'Cowardly refusing to install hooks with `core.hooksPath` set.\n'

@@ -137,11 +147,12 @@ def install(
    return 0


def install_hooks(config_file, store):
def install_hooks(config_file: str, store: Store) -> int:
    install_hook_envs(all_hooks(load_config(config_file), store), store)
    return 0


def _uninstall_hook_script(hook_type):  # type: (str) -> None
def _uninstall_hook_script(hook_type: str) -> None:
    hook_path, legacy_path = _hook_paths(hook_type)

    # If our file doesn't exist or it isn't ours, gtfo.

@@ -149,14 +160,14 @@ def _uninstall_hook_script(hook_type):  # type: (str) -> None
        return

    os.remove(hook_path)
    output.write_line('{} uninstalled'.format(hook_type))
    output.write_line(f'{hook_type} uninstalled')

    if os.path.exists(legacy_path):
        os.rename(legacy_path, hook_path)
        output.write_line('Restored previous hooks to {}'.format(hook_path))
        output.write_line(f'Restored previous hooks to {hook_path}')


def uninstall(hook_types):
def uninstall(hook_types: Sequence[str]) -> int:
    for hook_type in hook_types:
        _uninstall_hook_script(hook_type)
    return 0
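
_uninstall_hook_script above swaps a Python 2 style `# type:` comment for a real
annotation; mypy understands both, but the inline form is only possible once Python 2
compatibility is no longer needed. A tiny sketch of the same change, with a stand-in body:

# Before: the contract lived in a type comment
#     def _uninstall_hook_script(hook_type):  # type: (str) -> None

# After: the same contract as an inline annotation
def _uninstall_hook_script(hook_type: str) -> None:
    print(f'{hook_type} uninstalled')  # stand-in body for the example


_uninstall_hook_script('pre-commit')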
@@ -1,23 +1,19 @@
from __future__ import print_function
from __future__ import unicode_literals

import io
import re

import yaml
from aspy.yaml import ordered_load


def _indent(s):
def _indent(s: str) -> str:
    lines = s.splitlines(True)
    return ''.join(' ' * 4 + line if line.strip() else line for line in lines)


def _is_header_line(line):
    return (line.startswith(('#', '---')) or not line.strip())
def _is_header_line(line: str) -> bool:
    return line.startswith(('#', '---')) or not line.strip()


def _migrate_map(contents):
def _migrate_map(contents: str) -> str:
    # Find the first non-header line
    lines = contents.splitlines(True)
    i = 0

@@ -41,22 +37,23 @@ def _migrate_map(contents):
    return contents


def _migrate_sha_to_rev(contents):
def _migrate_sha_to_rev(contents: str) -> str:
    reg = re.compile(r'(\n\s+)sha:')
    return reg.sub(r'\1rev:', contents)


def migrate_config(config_file, quiet=False):
    with io.open(config_file) as f:
def migrate_config(config_file: str, quiet: bool = False) -> int:
    with open(config_file) as f:
        orig_contents = contents = f.read()

    contents = _migrate_map(contents)
    contents = _migrate_sha_to_rev(contents)

    if contents != orig_contents:
        with io.open(config_file, 'w') as f:
        with open(config_file, 'w') as f:
            f.write(contents)

        print('Configuration has been migrated.')
    elif not quiet:
        print('Configuration is already migrated.')
    return 0
@@ -1,10 +1,17 @@
from __future__ import unicode_literals

import argparse
import functools
import logging
import os
import re
import subprocess
import time
from typing import Any
from typing import Collection
from typing import Dict
from typing import List
from typing import Sequence
from typing import Set
from typing import Tuple

from identify.identify import tags_from_path

@@ -14,16 +21,23 @@ from pre_commit import output
from pre_commit.clientlib import load_config
from pre_commit.output import get_hook_message
from pre_commit.repository import all_hooks
from pre_commit.repository import Hook
from pre_commit.repository import install_hook_envs
from pre_commit.staged_files_only import staged_files_only
from pre_commit.store import Store
from pre_commit.util import cmd_output_b
from pre_commit.util import EnvironT
from pre_commit.util import noop_context

logger = logging.getLogger('pre_commit')


def filter_by_include_exclude(names, include, exclude):
def filter_by_include_exclude(
    names: Collection[str],
    include: str,
    exclude: str,
) -> List[str]:
    include_re, exclude_re = re.compile(include), re.compile(exclude)
    return [
        filename for filename in names

@@ -32,25 +46,26 @@ def filter_by_include_exclude(names, include, exclude):
    ]


class Classifier(object):
    def __init__(self, filenames):
class Classifier:
    def __init__(self, filenames: Sequence[str]) -> None:
        # on windows we normalize all filenames to use forward slashes
        # this makes it easier to filter using the `files:` regex
        # this also makes improperly quoted shell-based hooks work better
        # see #1173
        if os.altsep == '/' and os.sep == '\\':
            filenames = (f.replace(os.sep, os.altsep) for f in filenames)
            filenames = [f.replace(os.sep, os.altsep) for f in filenames]
        self.filenames = [f for f in filenames if os.path.lexists(f)]
        self._types_cache = {}

    def _types_for_file(self, filename):
        try:
            return self._types_cache[filename]
        except KeyError:
            ret = self._types_cache[filename] = tags_from_path(filename)
            return ret
    @functools.lru_cache(maxsize=None)
    def _types_for_file(self, filename: str) -> Set[str]:
        return tags_from_path(filename)

    def by_types(self, names, types, exclude_types):
    def by_types(
        self,
        names: Sequence[str],
        types: Collection[str],
        exclude_types: Collection[str],
    ) -> List[str]:
        types, exclude_types = frozenset(types), frozenset(exclude_types)
        ret = []
        for filename in names:

@@ -59,14 +74,14 @@ class Classifier(object):
                ret.append(filename)
        return ret

    def filenames_for_hook(self, hook):
    def filenames_for_hook(self, hook: Hook) -> Tuple[str, ...]:
        names = self.filenames
        names = filter_by_include_exclude(names, hook.files, hook.exclude)
        names = self.by_types(names, hook.types, hook.exclude_types)
        return names
        return tuple(names)


def _get_skips(environ):
def _get_skips(environ: EnvironT) -> Set[str]:
    skips = environ.get('SKIP', '')
    return {skip.strip() for skip in skips.split(',') if skip.strip()}

@@ -75,11 +90,18 @@ SKIPPED = 'Skipped'
NO_FILES = '(no files to check)'


def _subtle_line(s, use_color):
def _subtle_line(s: str, use_color: bool) -> None:
    output.write_line(color.format_color(s, color.SUBTLE, use_color))


def _run_single_hook(classifier, hook, skips, cols, verbose, use_color):
def _run_single_hook(
    classifier: Classifier,
    hook: Hook,
    skips: Set[str],
    cols: int,
    verbose: bool,
    use_color: bool,
) -> bool:
    filenames = classifier.filenames_for_hook(hook)

    if hook.id in skips or hook.alias in skips:

@@ -117,7 +139,8 @@ def _run_single_hook(classifier, hook, skips, cols, verbose, use_color):

    diff_cmd = ('git', 'diff', '--no-ext-diff')
    diff_before = cmd_output_b(*diff_cmd, retcode=None)
    filenames = tuple(filenames) if hook.pass_filenames else ()
    if not hook.pass_filenames:
        filenames = ()
    time_before = time.time()
    retcode, out = hook.run(filenames, use_color)
    duration = round(time.time() - time_before, 2) or 0

@@ -136,13 +159,13 @@ def _run_single_hook(classifier, hook, skips, cols, verbose, use_color):
    output.write_line(color.format_color(status, print_color, use_color))

    if verbose or hook.verbose or retcode or files_modified:
        _subtle_line('- hook id: {}'.format(hook.id), use_color)
        _subtle_line(f'- hook id: {hook.id}', use_color)

        if (verbose or hook.verbose) and duration is not None:
            _subtle_line('- duration: {}s'.format(duration), use_color)
            _subtle_line(f'- duration: {duration}s', use_color)

        if retcode:
            _subtle_line('- exit code: {}'.format(retcode), use_color)
            _subtle_line(f'- exit code: {retcode}', use_color)

        # Print a message if failing due to file modifications
        if files_modified:

@@ -156,7 +179,7 @@ def _run_single_hook(classifier, hook, skips, cols, verbose, use_color):
    return files_modified or bool(retcode)


def _compute_cols(hooks):
def _compute_cols(hooks: Sequence[Hook]) -> int:
    """Compute the number of columns to display hook messages. The widest
    that will be displayed is in the no files skipped case:

@@ -171,7 +194,7 @@ def _compute_cols(hooks):
    return max(cols, 80)


def _all_filenames(args):
def _all_filenames(args: argparse.Namespace) -> Collection[str]:
    if args.origin and args.source:
        return git.get_changed_files(args.origin, args.source)
    elif args.hook_stage in {'prepare-commit-msg', 'commit-msg'}:

@@ -186,7 +209,7 @@ def _all_filenames(args):
    return git.get_staged_files()


def _run_hooks(config, hooks, args, environ):
def _run_hooks(
    config: Dict[str, Any],
    hooks: Sequence[Hook],
    args: argparse.Namespace,
    environ: EnvironT,
) -> int:
    """Actually run the hooks."""
    skips = _get_skips(environ)
    cols = _compute_cols(hooks)

@@ -223,12 +251,12 @@ def _run_hooks(config, hooks, args, environ):
    return retval


def _has_unmerged_paths():
def _has_unmerged_paths() -> bool:
    _, stdout, _ = cmd_output_b('git', 'ls-files', '--unmerged')
    return bool(stdout.strip())


def _has_unstaged_config(config_file):
def _has_unstaged_config(config_file: str) -> bool:
    retcode, _, _ = cmd_output_b(
        'git', 'diff', '--no-ext-diff', '--exit-code', config_file,
        retcode=None,

@@ -237,7 +265,12 @@ def _has_unstaged_config(config_file):
    return retcode == 1


def run(config_file, store, args, environ=os.environ):
def run(
    config_file: str,
    store: Store,
    args: argparse.Namespace,
    environ: EnvironT = os.environ,
) -> int:
    no_stash = args.all_files or bool(args.files)

    # Check if we have unresolved merge conflict files and fail fast.
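
Classifier._types_for_file above replaces a hand-rolled dict cache with
functools.lru_cache. A standalone sketch of the pattern (TypeTagger is a hypothetical
class, not pre-commit's Classifier); note the cache is keyed on (self, filename), so it
holds a reference to the instance for the life of the process, which is fine for a
short-lived command:

import functools
from typing import Set


class TypeTagger:
    @functools.lru_cache(maxsize=None)
    def types_for_file(self, filename: str) -> Set[str]:
        # pretend this classification is expensive; it runs once per filename
        return {'text', 'python'} if filename.endswith('.py') else {'text'}


tagger = TypeTagger()
print(tagger.types_for_file('setup.py'))  # computed
print(tagger.types_for_file('setup.py'))  # served from the cache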
@@ -1,8 +1,3 @@
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals

# TODO: maybe `git ls-remote git://github.com/pre-commit/pre-commit-hooks` to
# determine the latest revision? This adds ~200ms from my tests (and is
# significantly faster than https:// or http://). For now, periodically

@@ -21,6 +16,6 @@ repos:
'''


def sample_config():
def sample_config() -> int:
    print(SAMPLE_CONFIG, end='')
    return 0
@@ -1,9 +1,8 @@
from __future__ import absolute_import
from __future__ import unicode_literals

import argparse
import collections
import logging
import os.path
from typing import Tuple

from aspy.yaml import ordered_dump

@@ -20,7 +19,7 @@ from pre_commit.xargs import xargs
logger = logging.getLogger(__name__)


def _repo_ref(tmpdir, repo, ref):
def _repo_ref(tmpdir: str, repo: str, ref: str) -> Tuple[str, str]:
    # if `ref` is explicitly passed, use it
    if ref:
        return repo, ref

@@ -50,7 +49,7 @@ def _repo_ref(tmpdir, repo, ref):
    return repo, ref


def try_repo(args):
def try_repo(args: argparse.Namespace) -> int:
    with tmpdir() as tempdir:
        repo, ref = _repo_ref(tempdir, args.repo, args.ref)
@@ -1,6 +1,3 @@
from __future__ import absolute_import
from __future__ import unicode_literals

import sys

if sys.version_info < (3, 8):  # pragma: no cover (<PY38)
@@ -1,19 +1,33 @@
from __future__ import absolute_import
from __future__ import unicode_literals

import collections
import contextlib
import enum
import os
from typing import Generator
from typing import NamedTuple
from typing import Optional
from typing import Tuple
from typing import Union

from pre_commit.util import EnvironT


UNSET = collections.namedtuple('UNSET', ())()
class _Unset(enum.Enum):
    UNSET = 1


Var = collections.namedtuple('Var', ('name', 'default'))
Var.__new__.__defaults__ = ('',)
UNSET = _Unset.UNSET


def format_env(parts, env):
class Var(NamedTuple):
    name: str
    default: str = ''


SubstitutionT = Tuple[Union[str, Var], ...]
ValueT = Union[str, _Unset, SubstitutionT]
PatchesT = Tuple[Tuple[str, ValueT], ...]


def format_env(parts: SubstitutionT, env: EnvironT) -> str:
    return ''.join(
        env.get(part.name, part.default) if isinstance(part, Var) else part
        for part in parts

@@ -21,7 +35,10 @@ def format_env(parts, env):


@contextlib.contextmanager
def envcontext(patch, _env=None):
def envcontext(
    patch: PatchesT,
    _env: Optional[EnvironT] = None,
) -> Generator[None, None, None]:
    """In this context, `os.environ` is modified according to `patch`.

    `patch` is an iterable of 2-tuples (key, value):
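
UNSET above changes from an instance of an empty namedtuple to an enum member, which gives
a sentinel that type checkers can name in a Union and narrow with an identity check. A
small sketch of the pattern outside of pre-commit:

import enum
from typing import Union


class _Unset(enum.Enum):
    UNSET = 1


UNSET = _Unset.UNSET
ValueT = Union[str, _Unset]


def describe(value: ValueT) -> str:
    if value is UNSET:  # mypy narrows the Union on this identity check
        return '<unset>'
    return value


print(describe(UNSET))
print(describe('PATH=/usr/bin'))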
@@ -1,13 +1,9 @@
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals

import contextlib
import os.path
import sys
import traceback

import six
from typing import Generator
from typing import Union

import pre_commit.constants as C
from pre_commit import five

@@ -19,14 +15,11 @@ class FatalError(RuntimeError):
    pass


def _to_bytes(exc):
    try:
        return bytes(exc)
    except Exception:
        return six.text_type(exc).encode('UTF-8')
def _to_bytes(exc: BaseException) -> bytes:
    return str(exc).encode('UTF-8')


def _log_and_exit(msg, exc, formatted):
def _log_and_exit(msg: str, exc: BaseException, formatted: str) -> None:
    error_msg = b''.join((
        five.to_bytes(msg), b': ',
        five.to_bytes(type(exc).__name__), b': ',

@@ -35,22 +28,22 @@ def _log_and_exit(msg, exc, formatted):
    output.write_line(error_msg)
    store = Store()
    log_path = os.path.join(store.directory, 'pre-commit.log')
    output.write_line('Check the log at {}'.format(log_path))
    output.write_line(f'Check the log at {log_path}')

    with open(log_path, 'wb') as log:
        def _log_line(*s):  # type: (*str) -> None
            output.write_line(*s, stream=log)
        def _log_line(s: Union[None, str, bytes] = None) -> None:
            output.write_line(s, stream=log)

        _log_line('### version information')
        _log_line()
        _log_line('```')
        _log_line('pre-commit version: {}'.format(C.VERSION))
        _log_line(f'pre-commit version: {C.VERSION}')
        _log_line('sys.version:')
        for line in sys.version.splitlines():
            _log_line(' {}'.format(line))
        _log_line('sys.executable: {}'.format(sys.executable))
        _log_line('os.name: {}'.format(os.name))
        _log_line('sys.platform: {}'.format(sys.platform))
            _log_line(f' {line}')
        _log_line(f'sys.executable: {sys.executable}')
        _log_line(f'os.name: {os.name}')
        _log_line(f'sys.platform: {sys.platform}')
        _log_line('```')
        _log_line()

@@ -67,7 +60,7 @@ def _log_and_exit(msg, exc, formatted):


@contextlib.contextmanager
def error_handler():
def error_handler() -> Generator[None, None, None]:
    try:
        yield
    except (Exception, KeyboardInterrupt) as e:
@@ -1,11 +1,11 @@
from __future__ import absolute_import
from __future__ import unicode_literals

import contextlib
import errno
import os
from typing import Callable
from typing import Generator


try:  # pragma: no cover (windows)
if os.name == 'nt':  # pragma: no cover (windows)
    import msvcrt

    # https://docs.microsoft.com/en-us/cpp/c-runtime-library/reference/locking

@@ -15,15 +15,20 @@ try:  # pragma: no cover (windows)
    _region = 0xffff

    @contextlib.contextmanager
    def _locked(fileno, blocked_cb):
    def _locked(
        fileno: int,
        blocked_cb: Callable[[], None],
    ) -> Generator[None, None, None]:
        try:
            msvcrt.locking(fileno, msvcrt.LK_NBLCK, _region)
        except IOError:
            # TODO: https://github.com/python/typeshed/pull/3607
            msvcrt.locking(fileno, msvcrt.LK_NBLCK, _region)  # type: ignore
        except OSError:
            blocked_cb()
            while True:
                try:
                    msvcrt.locking(fileno, msvcrt.LK_LOCK, _region)
                except IOError as e:
                    # TODO: https://github.com/python/typeshed/pull/3607
                    msvcrt.locking(fileno, msvcrt.LK_LOCK, _region)  # type: ignore  # noqa: E501
                except OSError as e:
                    # Locking violation. Returned when the _LK_LOCK or _LK_RLCK
                    # flag is specified and the file cannot be locked after 10
                    # attempts.

@@ -40,15 +45,19 @@ try:  # pragma: no cover (windows)
            # The documentation however states:
            # "Regions should be locked only briefly and should be unlocked
            # before closing a file or exiting the program."
            msvcrt.locking(fileno, msvcrt.LK_UNLCK, _region)
except ImportError:  # pragma: windows no cover
            # TODO: https://github.com/python/typeshed/pull/3607
            msvcrt.locking(fileno, msvcrt.LK_UNLCK, _region)  # type: ignore
else:  # pragma: windows no cover
    import fcntl

    @contextlib.contextmanager
    def _locked(fileno, blocked_cb):
    def _locked(
        fileno: int,
        blocked_cb: Callable[[], None],
    ) -> Generator[None, None, None]:
        try:
            fcntl.flock(fileno, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except IOError:  # pragma: no cover (tests are single-threaded)
        except OSError:  # pragma: no cover (tests are single-threaded)
            blocked_cb()
            fcntl.flock(fileno, fcntl.LOCK_EX)
        try:

@@ -58,7 +67,10 @@ except ImportError:  # pragma: windows no cover


@contextlib.contextmanager
def lock(path, blocked_cb):
def lock(
    path: str,
    blocked_cb: Callable[[], None],
) -> Generator[None, None, None]:
    with open(path, 'a+') as f:
        with _locked(f.fileno(), blocked_cb):
            yield
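
The IOError/WindowsError to OSError changes above (and in color.py earlier) rely on the
fact that, on Python 3, those older names are plain aliases of OSError, so a single except
clause covers them. A quick demonstration:

assert IOError is OSError  # true on any Python 3

try:
    open('/nonexistent/path/for/demo')
except OSError as exc:  # also catches what Python 2 code spelled as IOError
    print(type(exc).__name__)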
@@ -1,15 +1,12 @@
from __future__ import absolute_import
from __future__ import unicode_literals

import six
from typing import Union


def to_text(s):
    return s if isinstance(s, six.text_type) else s.decode('UTF-8')
def to_text(s: Union[str, bytes]) -> str:
    return s if isinstance(s, str) else s.decode('UTF-8')


def to_bytes(s):
def to_bytes(s: Union[str, bytes]) -> bytes:
    return s if isinstance(s, bytes) else s.encode('UTF-8')


n = to_bytes if six.PY2 else to_text
n = to_text
@@ -1,17 +1,20 @@
from __future__ import unicode_literals

import logging
import os.path
import sys
from typing import Dict
from typing import List
from typing import Optional
from typing import Set

from pre_commit.util import cmd_output
from pre_commit.util import cmd_output_b
from pre_commit.util import EnvironT


logger = logging.getLogger(__name__)


def zsplit(s):
def zsplit(s: str) -> List[str]:
    s = s.strip('\0')
    if s:
        return s.split('\0')

@@ -19,7 +22,7 @@ def zsplit(s):
        return []


def no_git_env(_env=None):
def no_git_env(_env: Optional[EnvironT] = None) -> Dict[str, str]:
    # Too many bugs dealing with environment variables and GIT:
    # https://github.com/pre-commit/pre-commit/issues/300
    # In git 2.6.3 (maybe others), git exports GIT_WORK_TREE while running

@@ -36,11 +39,11 @@ def no_git_env(_env=None):
    }


def get_root():
def get_root() -> str:
    return cmd_output('git', 'rev-parse', '--show-toplevel')[1].strip()


def get_git_dir(git_root='.'):
def get_git_dir(git_root: str = '.') -> str:
    opts = ('--git-common-dir', '--git-dir')
    _, out, _ = cmd_output('git', 'rev-parse', *opts, cwd=git_root)
    for line, opt in zip(out.splitlines(), opts):

@@ -50,12 +53,12 @@ def get_git_dir(git_root='.'):
    raise AssertionError('unreachable: no git dir')


def get_remote_url(git_root):
def get_remote_url(git_root: str) -> str:
    _, out, _ = cmd_output('git', 'config', 'remote.origin.url', cwd=git_root)
    return out.strip()


def is_in_merge_conflict():
def is_in_merge_conflict() -> bool:
    git_dir = get_git_dir('.')
    return (
        os.path.exists(os.path.join(git_dir, 'MERGE_MSG')) and

@@ -63,7 +66,7 @@ def is_in_merge_conflict():
    )


def parse_merge_msg_for_conflicts(merge_msg):
def parse_merge_msg_for_conflicts(merge_msg: bytes) -> List[str]:
    # Conflicted files start with tabs
    return [
        line.lstrip(b'#').strip().decode('UTF-8')

@@ -73,7 +76,7 @@ def parse_merge_msg_for_conflicts(merge_msg):
    ]


def get_conflicted_files():
def get_conflicted_files() -> Set[str]:
    logger.info('Checking merge-conflict files only.')
    # Need to get the conflicted files from the MERGE_MSG because they could
    # have resolved the conflict by choosing one side or the other

@@ -94,7 +97,7 @@ def get_conflicted_files():
    return set(merge_conflict_filenames) | set(merge_diff_filenames)


def get_staged_files(cwd=None):
def get_staged_files(cwd: Optional[str] = None) -> List[str]:
    return zsplit(
        cmd_output(
            'git', 'diff', '--staged', '--name-only', '--no-ext-diff', '-z',

@@ -105,7 +108,7 @@ def get_staged_files(cwd=None):
    )


def intent_to_add_files():
def intent_to_add_files() -> List[str]:
    _, stdout, _ = cmd_output('git', 'status', '--porcelain', '-z')
    parts = list(reversed(zsplit(stdout)))
    intent_to_add = []

@@ -119,37 +122,35 @@ def intent_to_add_files():
    return intent_to_add


def get_all_files():
def get_all_files() -> List[str]:
    return zsplit(cmd_output('git', 'ls-files', '-z')[1])


def get_changed_files(new, old):
def get_changed_files(new: str, old: str) -> List[str]:
    return zsplit(
        cmd_output(
            'git', 'diff', '--name-only', '--no-ext-diff', '-z',
            '{}...{}'.format(old, new),
            f'{old}...{new}',
        )[1],
    )


def head_rev(remote):
def head_rev(remote: str) -> str:
    _, out, _ = cmd_output('git', 'ls-remote', '--exit-code', remote, 'HEAD')
    return out.split()[0]


def has_diff(*args, **kwargs):
    repo = kwargs.pop('repo', '.')
    assert not kwargs, kwargs
def has_diff(*args: str, repo: str = '.') -> bool:
    cmd = ('git', 'diff', '--quiet', '--no-ext-diff') + args
    return cmd_output_b(*cmd, cwd=repo, retcode=None)[0] == 1


def has_core_hookpaths_set():
def has_core_hookpaths_set() -> bool:
    _, out, _ = cmd_output_b('git', 'config', 'core.hooksPath', retcode=None)
    return bool(out.strip())


def init_repo(path, remote):
def init_repo(path: str, remote: str) -> None:
    if os.path.isdir(remote):
        remote = os.path.abspath(remote)

@@ -158,7 +159,7 @@ def init_repo(path, remote):
    cmd_output_b('git', 'remote', 'add', 'origin', remote, cwd=path, env=env)


def commit(repo='.'):
def commit(repo: str = '.') -> None:
    env = no_git_env()
    name, email = 'pre-commit', 'asottile+pre-commit@umich.edu'
    env['GIT_AUTHOR_NAME'] = env['GIT_COMMITTER_NAME'] = name

@@ -167,12 +168,12 @@ def commit(repo='.'):
    cmd_output_b(*cmd, cwd=repo, env=env)


def git_path(name, repo='.'):
def git_path(name: str, repo: str = '.') -> str:
    _, out, _ = cmd_output('git', 'rev-parse', '--git-path', name, cwd=repo)
    return os.path.join(repo, out.strip())


def check_for_cygwin_mismatch():
def check_for_cygwin_mismatch() -> None:
    """See https://github.com/pre-commit/pre-commit/issues/354"""
    if sys.platform in ('cygwin', 'win32'):  # pragma: no cover (windows)
        is_cygwin_python = sys.platform == 'cygwin'
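
has_diff above drops the **kwargs workaround that Python 2 needed for keyword-only
arguments. A standalone sketch of the same signature change, using subprocess directly
instead of pre-commit's cmd_output_b helper:

import subprocess


# Python 2 era shape:
#     def has_diff(*args, **kwargs):
#         repo = kwargs.pop('repo', '.')
#         assert not kwargs, kwargs

def has_diff(*args: str, repo: str = '.') -> bool:
    cmd = ('git', 'diff', '--quiet', '--no-ext-diff') + args
    # `git diff --quiet` exits 1 when there are differences
    return subprocess.run(cmd, cwd=repo).returncode == 1


# usage: positional args are passed through to git, repo must be a keyword
# has_diff('HEAD', repo='.')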
@@ -1,4 +1,9 @@
from __future__ import unicode_literals
from typing import Callable
from typing import NamedTuple
from typing import Optional
from typing import Sequence
from typing import Tuple
from typing import TYPE_CHECKING

from pre_commit.languages import conda
from pre_commit.languages import docker

@@ -14,57 +19,43 @@ from pre_commit.languages import rust
from pre_commit.languages import script
from pre_commit.languages import swift
from pre_commit.languages import system
from pre_commit.prefix import Prefix

# A language implements the following constant and functions in its module:
#
# # Use None for no environment
# ENVIRONMENT_DIR = 'foo_env'
#
# def get_default_version():
#     """Return a value to replace the 'default' value for language_version.
#
#     return 'default' if there is no better option.
#     """
#
# def healthy(prefix, language_version):
#     """Return whether or not the environment is considered functional."""
#
# def install_environment(prefix, version, additional_dependencies):
#     """Installs a repository in the given repository. Note that the current
#     working directory will already be inside the repository.
#
#     Args:
#         prefix - `Prefix` bound to the repository.
#         version - A version specified in the hook configuration or 'default'.
#     """
#
# def run_hook(hook, file_args, color):
#     """Runs a hook and returns the returncode and output of running that
#     hook.
#
#     Args:
#         hook - `Hook`
#         file_args - The files to be run
#         color - whether the hook should be given a pty (when supported)
#
#     Returns:
#         (returncode, output)
#     """
if TYPE_CHECKING:
    from pre_commit.repository import Hook


class Language(NamedTuple):
    name: str
    # Use `None` for no installation / environment
    ENVIRONMENT_DIR: Optional[str]
    # return a value to replace `'default` for `language_version`
    get_default_version: Callable[[], str]
    # return whether the environment is healthy (or should be rebuilt)
    healthy: Callable[[Prefix, str], bool]
    # install a repository for the given language and language_version
    install_environment: Callable[[Prefix, str, Sequence[str]], None]
    # execute a hook and return the exit code and output
    run_hook: 'Callable[[Hook, Sequence[str], bool], Tuple[int, bytes]]'


# TODO: back to modules + Protocol: https://github.com/python/mypy/issues/5018
languages = {
    'conda': conda,
    'docker': docker,
    'docker_image': docker_image,
    'fail': fail,
    'golang': golang,
    'node': node,
    'pygrep': pygrep,
    'python': python,
    'python_venv': python_venv,
    'ruby': ruby,
    'rust': rust,
    'script': script,
    'swift': swift,
    'system': system,
    # BEGIN GENERATED (testing/gen-languages-all)
    'conda': Language(name='conda', ENVIRONMENT_DIR=conda.ENVIRONMENT_DIR, get_default_version=conda.get_default_version, healthy=conda.healthy, install_environment=conda.install_environment, run_hook=conda.run_hook),  # noqa: E501
    'docker': Language(name='docker', ENVIRONMENT_DIR=docker.ENVIRONMENT_DIR, get_default_version=docker.get_default_version, healthy=docker.healthy, install_environment=docker.install_environment, run_hook=docker.run_hook),  # noqa: E501
    'docker_image': Language(name='docker_image', ENVIRONMENT_DIR=docker_image.ENVIRONMENT_DIR, get_default_version=docker_image.get_default_version, healthy=docker_image.healthy, install_environment=docker_image.install_environment, run_hook=docker_image.run_hook),  # noqa: E501
    'fail': Language(name='fail', ENVIRONMENT_DIR=fail.ENVIRONMENT_DIR, get_default_version=fail.get_default_version, healthy=fail.healthy, install_environment=fail.install_environment, run_hook=fail.run_hook),  # noqa: E501
    'golang': Language(name='golang', ENVIRONMENT_DIR=golang.ENVIRONMENT_DIR, get_default_version=golang.get_default_version, healthy=golang.healthy, install_environment=golang.install_environment, run_hook=golang.run_hook),  # noqa: E501
    'node': Language(name='node', ENVIRONMENT_DIR=node.ENVIRONMENT_DIR, get_default_version=node.get_default_version, healthy=node.healthy, install_environment=node.install_environment, run_hook=node.run_hook),  # noqa: E501
    'pygrep': Language(name='pygrep', ENVIRONMENT_DIR=pygrep.ENVIRONMENT_DIR, get_default_version=pygrep.get_default_version, healthy=pygrep.healthy, install_environment=pygrep.install_environment, run_hook=pygrep.run_hook),  # noqa: E501
    'python': Language(name='python', ENVIRONMENT_DIR=python.ENVIRONMENT_DIR, get_default_version=python.get_default_version, healthy=python.healthy, install_environment=python.install_environment, run_hook=python.run_hook),  # noqa: E501
    'python_venv': Language(name='python_venv', ENVIRONMENT_DIR=python_venv.ENVIRONMENT_DIR, get_default_version=python_venv.get_default_version, healthy=python_venv.healthy, install_environment=python_venv.install_environment, run_hook=python_venv.run_hook),  # noqa: E501
    'ruby': Language(name='ruby', ENVIRONMENT_DIR=ruby.ENVIRONMENT_DIR, get_default_version=ruby.get_default_version, healthy=ruby.healthy, install_environment=ruby.install_environment, run_hook=ruby.run_hook),  # noqa: E501
    'rust': Language(name='rust', ENVIRONMENT_DIR=rust.ENVIRONMENT_DIR, get_default_version=rust.get_default_version, healthy=rust.healthy, install_environment=rust.install_environment, run_hook=rust.run_hook),  # noqa: E501
    'script': Language(name='script', ENVIRONMENT_DIR=script.ENVIRONMENT_DIR, get_default_version=script.get_default_version, healthy=script.healthy, install_environment=script.install_environment, run_hook=script.run_hook),  # noqa: E501
    'swift': Language(name='swift', ENVIRONMENT_DIR=swift.ENVIRONMENT_DIR, get_default_version=swift.get_default_version, healthy=swift.healthy, install_environment=swift.install_environment, run_hook=swift.run_hook),  # noqa: E501
    'system': Language(name='system', ENVIRONMENT_DIR=system.ENVIRONMENT_DIR, get_default_version=system.get_default_version, healthy=system.healthy, install_environment=system.install_environment, run_hook=system.run_hook),  # noqa: E501
    # END GENERATED
}
all_languages = sorted(languages)
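
The languages mapping above now stores Language NamedTuple records instead of bare
modules, trading the mypy Protocol the TODO mentions for explicit callables. A reduced
sketch of the idea with a single made-up entry:

from typing import Callable, NamedTuple, Optional


class Language(NamedTuple):
    name: str
    ENVIRONMENT_DIR: Optional[str]
    get_default_version: Callable[[], str]


def _default_version() -> str:
    return 'default'


# callers look a plugin up by name and use its callables uniformly
languages = {
    'system': Language(
        name='system',
        ENVIRONMENT_DIR=None,
        get_default_version=_default_version,
    ),
}
print(languages['system'].get_default_version())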
@ -1,24 +1,34 @@
|
|||
import contextlib
|
||||
import os
|
||||
from typing import Generator
|
||||
from typing import Sequence
|
||||
from typing import Tuple
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from pre_commit.envcontext import envcontext
|
||||
from pre_commit.envcontext import PatchesT
|
||||
from pre_commit.envcontext import SubstitutionT
|
||||
from pre_commit.envcontext import UNSET
|
||||
from pre_commit.envcontext import Var
|
||||
from pre_commit.languages import helpers
|
||||
from pre_commit.prefix import Prefix
|
||||
from pre_commit.util import clean_path_on_failure
|
||||
from pre_commit.util import cmd_output_b
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pre_commit.repository import Hook
|
||||
|
||||
ENVIRONMENT_DIR = 'conda'
|
||||
get_default_version = helpers.basic_get_default_version
|
||||
healthy = helpers.basic_healthy
|
||||
|
||||
|
||||
def get_env_patch(env):
def get_env_patch(env: str) -> PatchesT:
    # On non-windows systems executables live in $CONDA_PREFIX/bin, on Windows
    # they can be in $CONDA_PREFIX/bin, $CONDA_PREFIX/Library/bin,
    # $CONDA_PREFIX/Scripts and $CONDA_PREFIX. Whereas the latter only
    # seems to be used for python.exe.
    path = (os.path.join(env, 'bin'), os.pathsep, Var('PATH'))
    path: SubstitutionT = (os.path.join(env, 'bin'), os.pathsep, Var('PATH'))
    if os.name == 'nt':  # pragma: no cover (platform specific)
        path = (env, os.pathsep) + path
        path = (os.path.join(env, 'Scripts'), os.pathsep) + path

@@ -33,14 +43,21 @@ def get_env_patch(env):
|
||||
|
||||
@contextlib.contextmanager
|
||||
def in_env(prefix, language_version):
|
||||
def in_env(
|
||||
prefix: Prefix,
|
||||
language_version: str,
|
||||
) -> Generator[None, None, None]:
|
||||
directory = helpers.environment_dir(ENVIRONMENT_DIR, language_version)
|
||||
envdir = prefix.path(directory)
|
||||
with envcontext(get_env_patch(envdir)):
|
||||
yield
|
||||
|
||||
|
||||
def install_environment(prefix, version, additional_dependencies):
|
||||
def install_environment(
|
||||
prefix: Prefix,
|
||||
version: str,
|
||||
additional_dependencies: Sequence[str],
|
||||
) -> None:
|
||||
helpers.assert_version_default('conda', version)
|
||||
directory = helpers.environment_dir(ENVIRONMENT_DIR, version)
|
||||
|
||||
|
|
@ -53,11 +70,15 @@ def install_environment(prefix, version, additional_dependencies):
|
|||
if additional_dependencies:
|
||||
cmd_output_b(
|
||||
'conda', 'install', '-p', env_dir, *additional_dependencies,
|
||||
cwd=prefix.prefix_dir
|
||||
cwd=prefix.prefix_dir,
|
||||
)
|
||||
|
||||
|
||||
def run_hook(hook, file_args, color):
|
||||
def run_hook(
|
||||
hook: 'Hook',
|
||||
file_args: Sequence[str],
|
||||
color: bool,
|
||||
) -> Tuple[int, bytes]:
|
||||
    # TODO: Some rare commands need to be run using `conda run` but mostly we
    # can run them without, which is much quicker and produces better
    # output.
|
|
|
|||
|
|
@ -1,16 +1,18 @@
|
|||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
from typing import Sequence
|
||||
from typing import Tuple
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit import five
|
||||
from pre_commit.languages import helpers
|
||||
from pre_commit.prefix import Prefix
|
||||
from pre_commit.util import CalledProcessError
|
||||
from pre_commit.util import clean_path_on_failure
|
||||
from pre_commit.util import cmd_output_b
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pre_commit.repository import Hook
|
||||
|
||||
ENVIRONMENT_DIR = 'docker'
|
||||
PRE_COMMIT_LABEL = 'PRE_COMMIT'
|
||||
|
|
@ -18,16 +20,16 @@ get_default_version = helpers.basic_get_default_version
|
|||
healthy = helpers.basic_healthy
|
||||
|
||||
|
||||
def md5(s):  # pragma: windows no cover
    return hashlib.md5(five.to_bytes(s)).hexdigest()
def md5(s: str) -> str:  # pragma: windows no cover
    return hashlib.md5(s.encode()).hexdigest()


def docker_tag(prefix):  # pragma: windows no cover
def docker_tag(prefix: Prefix) -> str:  # pragma: windows no cover
    md5sum = md5(os.path.basename(prefix.prefix_dir)).lower()
    return 'pre-commit-{}'.format(md5sum)
    return f'pre-commit-{md5sum}'

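The docker image tag is therefore a pure function of the checkout directory's basename, so rebuilding the same hook repo always reuses the same tag. A standalone sketch of that derivation, using an illustrative cache path (not from the diff):

    import hashlib
    import os.path

    prefix_dir = '/home/user/.cache/pre-commit/repo0a1b2c'  # illustrative path
    md5sum = hashlib.md5(os.path.basename(prefix_dir).encode()).hexdigest().lower()
    print(f'pre-commit-{md5sum}')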
def docker_is_running(): # pragma: windows no cover
|
||||
def docker_is_running() -> bool: # pragma: windows no cover
|
||||
try:
|
||||
cmd_output_b('docker', 'ps')
|
||||
except CalledProcessError:
|
||||
|
|
@ -36,16 +38,18 @@ def docker_is_running(): # pragma: windows no cover
|
|||
return True
|
||||
|
||||
|
||||
def assert_docker_available(): # pragma: windows no cover
|
||||
def assert_docker_available() -> None: # pragma: windows no cover
|
||||
assert docker_is_running(), (
|
||||
'Docker is either not running or not configured in this environment'
|
||||
)
|
||||
|
||||
|
||||
def build_docker_image(prefix, **kwargs): # pragma: windows no cover
|
||||
pull = kwargs.pop('pull')
|
||||
assert not kwargs, kwargs
|
||||
cmd = (
|
||||
def build_docker_image(
|
||||
prefix: Prefix,
|
||||
*,
|
||||
pull: bool,
|
||||
) -> None: # pragma: windows no cover
|
||||
cmd: Tuple[str, ...] = (
|
||||
'docker', 'build',
|
||||
'--tag', docker_tag(prefix),
|
||||
'--label', PRE_COMMIT_LABEL,
|
||||
|
|
@ -58,8 +62,8 @@ def build_docker_image(prefix, **kwargs): # pragma: windows no cover
|
|||
|
||||
|
||||
def install_environment(
|
||||
prefix, version, additional_dependencies,
|
||||
): # pragma: windows no cover
|
||||
prefix: Prefix, version: str, additional_dependencies: Sequence[str],
|
||||
) -> None: # pragma: windows no cover
|
||||
helpers.assert_version_default('docker', version)
|
||||
helpers.assert_no_additional_deps('docker', additional_dependencies)
|
||||
assert_docker_available()
|
||||
|
|
@ -75,14 +79,14 @@ def install_environment(
|
|||
os.mkdir(directory)
|
||||
|
||||
|
||||
def get_docker_user(): # pragma: windows no cover
|
||||
def get_docker_user() -> str: # pragma: windows no cover
|
||||
try:
|
||||
return '{}:{}'.format(os.getuid(), os.getgid())
|
||||
except AttributeError:
|
||||
return '1000:1000'
|
||||
|
||||
|
||||
def docker_cmd(): # pragma: windows no cover
|
||||
def docker_cmd() -> Tuple[str, ...]: # pragma: windows no cover
|
||||
return (
|
||||
'docker', 'run',
|
||||
'--rm',
|
||||
|
|
@ -95,7 +99,11 @@ def docker_cmd(): # pragma: windows no cover
|
|||
)
|
||||
|
||||
|
||||
def run_hook(hook, file_args, color): # pragma: windows no cover
|
||||
def run_hook(
|
||||
hook: 'Hook',
|
||||
file_args: Sequence[str],
|
||||
color: bool,
|
||||
) -> Tuple[int, bytes]: # pragma: windows no cover
|
||||
assert_docker_available()
|
||||
# Rebuild the docker image in case it has gone missing, as many people do
|
||||
# automated cleanup of docker images.
|
||||
@ -1,10 +1,13 @@
|
|||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
from typing import Sequence
|
||||
from typing import Tuple
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from pre_commit.languages import helpers
|
||||
from pre_commit.languages.docker import assert_docker_available
|
||||
from pre_commit.languages.docker import docker_cmd
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pre_commit.repository import Hook
|
||||
|
||||
ENVIRONMENT_DIR = None
|
||||
get_default_version = helpers.basic_get_default_version
|
||||
|
|
@ -12,7 +15,11 @@ healthy = helpers.basic_healthy
|
|||
install_environment = helpers.no_install
|
||||
|
||||
|
||||
def run_hook(hook, file_args, color): # pragma: windows no cover
|
||||
def run_hook(
|
||||
hook: 'Hook',
|
||||
file_args: Sequence[str],
|
||||
color: bool,
|
||||
) -> Tuple[int, bytes]: # pragma: windows no cover
|
||||
assert_docker_available()
|
||||
cmd = docker_cmd() + hook.cmd
|
||||
return helpers.run_xargs(hook, cmd, file_args, color=color)
|
||||
|
|
|
|||
|
|
@ -1,7 +1,11 @@
|
|||
from __future__ import unicode_literals
|
||||
from typing import Sequence
|
||||
from typing import Tuple
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from pre_commit.languages import helpers
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pre_commit.repository import Hook
|
||||
|
||||
ENVIRONMENT_DIR = None
|
||||
get_default_version = helpers.basic_get_default_version
|
||||
|
|
@ -9,7 +13,11 @@ healthy = helpers.basic_healthy
|
|||
install_environment = helpers.no_install
|
||||
|
||||
|
||||
def run_hook(hook, file_args, color):
def run_hook(
        hook: 'Hook',
        file_args: Sequence[str],
        color: bool,
) -> Tuple[int, bytes]:
    out = hook.entry.encode('UTF-8') + b'\n\n'
    out += b'\n'.join(f.encode('UTF-8') for f in file_args) + b'\n'
    return 1, out
|
|
|
|||
|
|
@ -1,33 +1,39 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import contextlib
|
||||
import os.path
|
||||
import sys
|
||||
from typing import Generator
|
||||
from typing import Sequence
|
||||
from typing import Tuple
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit import git
|
||||
from pre_commit.envcontext import envcontext
|
||||
from pre_commit.envcontext import PatchesT
|
||||
from pre_commit.envcontext import Var
|
||||
from pre_commit.languages import helpers
|
||||
from pre_commit.prefix import Prefix
|
||||
from pre_commit.util import clean_path_on_failure
|
||||
from pre_commit.util import cmd_output
|
||||
from pre_commit.util import cmd_output_b
|
||||
from pre_commit.util import rmtree
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pre_commit.repository import Hook
|
||||
|
||||
ENVIRONMENT_DIR = 'golangenv'
|
||||
get_default_version = helpers.basic_get_default_version
|
||||
healthy = helpers.basic_healthy
|
||||
|
||||
|
||||
def get_env_patch(venv):
|
||||
def get_env_patch(venv: str) -> PatchesT:
|
||||
return (
|
||||
('PATH', (os.path.join(venv, 'bin'), os.pathsep, Var('PATH'))),
|
||||
)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def in_env(prefix):
|
||||
def in_env(prefix: Prefix) -> Generator[None, None, None]:
|
||||
envdir = prefix.path(
|
||||
helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT),
|
||||
)
|
||||
|
|
@ -35,7 +41,7 @@ def in_env(prefix):
|
|||
yield
|
||||
|
||||
|
||||
def guess_go_dir(remote_url):
|
||||
def guess_go_dir(remote_url: str) -> str:
|
||||
if remote_url.endswith('.git'):
|
||||
remote_url = remote_url[:-1 * len('.git')]
|
||||
looks_like_url = (
|
||||
|
|
@ -51,7 +57,11 @@ def guess_go_dir(remote_url):
|
|||
return 'unknown_src_dir'
|
||||
|
||||
|
||||
def install_environment(prefix, version, additional_dependencies):
|
||||
def install_environment(
|
||||
prefix: Prefix,
|
||||
version: str,
|
||||
additional_dependencies: Sequence[str],
|
||||
) -> None:
|
||||
helpers.assert_version_default('golang', version)
|
||||
directory = prefix.path(
|
||||
helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT),
|
||||
|
|
@ -81,6 +91,10 @@ def install_environment(prefix, version, additional_dependencies):
|
|||
rmtree(pkgdir)
|
||||
|
||||
|
||||
def run_hook(hook, file_args, color):
|
||||
def run_hook(
|
||||
hook: 'Hook',
|
||||
file_args: Sequence[str],
|
||||
color: bool,
|
||||
) -> Tuple[int, bytes]:
|
||||
with in_env(hook.prefix):
|
||||
return helpers.run_xargs(hook, hook.cmd, file_args, color=color)
|
||||
|
|
|
|||
|
|
@ -1,37 +1,54 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import multiprocessing
|
||||
import os
|
||||
import random
|
||||
|
||||
import six
|
||||
from typing import Any
|
||||
from typing import List
|
||||
from typing import NoReturn
|
||||
from typing import Optional
|
||||
from typing import overload
|
||||
from typing import Sequence
|
||||
from typing import Tuple
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit.prefix import Prefix
|
||||
from pre_commit.util import cmd_output_b
|
||||
from pre_commit.xargs import xargs
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pre_commit.repository import Hook
|
||||
|
||||
FIXED_RANDOM_SEED = 1542676186
|
||||
|
||||
|
||||
def run_setup_cmd(prefix, cmd):
|
||||
def run_setup_cmd(prefix: Prefix, cmd: Tuple[str, ...]) -> None:
|
||||
cmd_output_b(*cmd, cwd=prefix.prefix_dir)
|
||||
|
||||
|
||||
def environment_dir(ENVIRONMENT_DIR, language_version):
    if ENVIRONMENT_DIR is None:
@overload
def environment_dir(d: None, language_version: str) -> None: ...
@overload
def environment_dir(d: str, language_version: str) -> str: ...


def environment_dir(d: Optional[str], language_version: str) -> Optional[str]:
    if d is None:
        return None
    else:
        return '{}-{}'.format(ENVIRONMENT_DIR, language_version)
        return f'{d}-{language_version}'

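For readers unfamiliar with typing.overload: the two stub signatures above exist only for mypy, telling it that a None directory yields None and a str yields str; at runtime only the final concrete definition runs. A small self-contained sketch of the resulting behaviour (example values are illustrative):

    from typing import Optional

    def environment_dir(d: Optional[str], language_version: str) -> Optional[str]:
        if d is None:
            return None
        else:
            return f'{d}-{language_version}'

    assert environment_dir(None, 'default') is None          # e.g. pygrep / system hooks
    assert environment_dir('py_env', 'python3.7') == 'py_env-python3.7'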
def assert_version_default(binary, version):
|
||||
def assert_version_default(binary: str, version: str) -> None:
|
||||
if version != C.DEFAULT:
|
||||
raise AssertionError(
|
||||
'For now, pre-commit requires system-installed {}'.format(binary),
|
||||
f'For now, pre-commit requires system-installed {binary}',
|
||||
)
|
||||
|
||||
|
||||
def assert_no_additional_deps(lang, additional_deps):
|
||||
def assert_no_additional_deps(
|
||||
lang: str,
|
||||
additional_deps: Sequence[str],
|
||||
) -> None:
|
||||
if additional_deps:
|
||||
raise AssertionError(
|
||||
'For now, pre-commit does not support '
|
||||
|
|
@ -39,19 +56,23 @@ def assert_no_additional_deps(lang, additional_deps):
|
|||
)
|
||||
|
||||
|
||||
def basic_get_default_version():
|
||||
def basic_get_default_version() -> str:
|
||||
return C.DEFAULT
|
||||
|
||||
|
||||
def basic_healthy(prefix, language_version):
|
||||
def basic_healthy(prefix: Prefix, language_version: str) -> bool:
|
||||
return True
|
||||
|
||||
|
||||
def no_install(prefix, version, additional_dependencies):
|
||||
def no_install(
|
||||
prefix: Prefix,
|
||||
version: str,
|
||||
additional_dependencies: Sequence[str],
|
||||
) -> NoReturn:
|
||||
raise AssertionError('This type is not installable')
|
||||
|
||||
|
||||
def target_concurrency(hook):
|
||||
def target_concurrency(hook: 'Hook') -> int:
|
||||
if hook.require_serial or 'PRE_COMMIT_NO_CONCURRENCY' in os.environ:
|
||||
return 1
|
||||
else:
|
||||
|
|
@ -65,20 +86,22 @@ def target_concurrency(hook):
|
|||
return 1
|
||||
|
||||
|
||||
def _shuffled(seq):
|
||||
"""Deterministically shuffle identically under both py2 + py3."""
|
||||
def _shuffled(seq: Sequence[str]) -> List[str]:
|
||||
"""Deterministically shuffle"""
|
||||
fixed_random = random.Random()
|
||||
if six.PY2: # pragma: no cover (py2)
|
||||
fixed_random.seed(FIXED_RANDOM_SEED)
|
||||
else: # pragma: no cover (py3)
|
||||
fixed_random.seed(FIXED_RANDOM_SEED, version=1)
|
||||
fixed_random.seed(FIXED_RANDOM_SEED, version=1)
|
||||
|
||||
seq = list(seq)
|
||||
random.shuffle(seq, random=fixed_random.random)
|
||||
return seq
|
||||
|
||||
|
||||
def run_xargs(hook, cmd, file_args, **kwargs):
|
||||
def run_xargs(
|
||||
hook: 'Hook',
|
||||
cmd: Tuple[str, ...],
|
||||
file_args: Sequence[str],
|
||||
**kwargs: Any,
|
||||
) -> Tuple[int, bytes]:
|
||||
    # Shuffle the files so that they more evenly fill out the xargs partitions,
    # but do it deterministically in case a hook cares about ordering.
    file_args = _shuffled(file_args)
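The fixed seed (with version=1, kept from the old Python 2/3 compatibility code so the ordering matches earlier releases) makes the shuffle stable across runs. A quick illustrative check of that determinism, mirroring the helper above rather than adding new behaviour:

    import random

    FIXED_RANDOM_SEED = 1542676186

    def _shuffled(seq):
        fixed_random = random.Random()
        fixed_random.seed(FIXED_RANDOM_SEED, version=1)
        seq = list(seq)
        random.shuffle(seq, random=fixed_random.random)
        return seq

    assert _shuffled(['a.py', 'b.py', 'c.py']) == _shuffled(['a.py', 'b.py', 'c.py'])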
|
|
|
|||
|
|
@ -1,30 +1,36 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
import sys
|
||||
from typing import Generator
|
||||
from typing import Sequence
|
||||
from typing import Tuple
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit.envcontext import envcontext
|
||||
from pre_commit.envcontext import PatchesT
|
||||
from pre_commit.envcontext import Var
|
||||
from pre_commit.languages import helpers
|
||||
from pre_commit.languages.python import bin_dir
|
||||
from pre_commit.prefix import Prefix
|
||||
from pre_commit.util import clean_path_on_failure
|
||||
from pre_commit.util import cmd_output
|
||||
from pre_commit.util import cmd_output_b
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pre_commit.repository import Hook
|
||||
|
||||
ENVIRONMENT_DIR = 'node_env'
|
||||
get_default_version = helpers.basic_get_default_version
|
||||
healthy = helpers.basic_healthy
|
||||
|
||||
|
||||
def _envdir(prefix, version):
|
||||
def _envdir(prefix: Prefix, version: str) -> str:
|
||||
directory = helpers.environment_dir(ENVIRONMENT_DIR, version)
|
||||
return prefix.path(directory)
|
||||
|
||||
|
||||
def get_env_patch(venv): # pragma: windows no cover
|
||||
def get_env_patch(venv: str) -> PatchesT: # pragma: windows no cover
|
||||
if sys.platform == 'cygwin': # pragma: no cover
|
||||
_, win_venv, _ = cmd_output('cygpath', '-w', venv)
|
||||
install_prefix = r'{}\bin'.format(win_venv.strip())
|
||||
|
|
@ -45,14 +51,17 @@ def get_env_patch(venv): # pragma: windows no cover
|
|||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def in_env(prefix, language_version): # pragma: windows no cover
|
||||
def in_env(
|
||||
prefix: Prefix,
|
||||
language_version: str,
|
||||
) -> Generator[None, None, None]: # pragma: windows no cover
|
||||
with envcontext(get_env_patch(_envdir(prefix, language_version))):
|
||||
yield
|
||||
|
||||
|
||||
def install_environment(
|
||||
prefix, version, additional_dependencies,
|
||||
): # pragma: windows no cover
|
||||
prefix: Prefix, version: str, additional_dependencies: Sequence[str],
|
||||
) -> None: # pragma: windows no cover
|
||||
additional_dependencies = tuple(additional_dependencies)
|
||||
assert prefix.exists('package.json')
|
||||
envdir = _envdir(prefix, version)
|
||||
|
|
@ -78,6 +87,10 @@ def install_environment(
|
|||
)
|
||||
|
||||
|
||||
def run_hook(hook, file_args, color): # pragma: windows no cover
|
||||
def run_hook(
|
||||
hook: 'Hook',
|
||||
file_args: Sequence[str],
|
||||
color: bool,
|
||||
) -> Tuple[int, bytes]: # pragma: windows no cover
|
||||
with in_env(hook.prefix, hook.language_version):
|
||||
return helpers.run_xargs(hook, hook.cmd, file_args, color=color)
|
||||
|
|
|
|||
|
|
@ -1,14 +1,18 @@
|
|||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import argparse
|
||||
import re
|
||||
import sys
|
||||
from typing import Optional
|
||||
from typing import Pattern
|
||||
from typing import Sequence
|
||||
from typing import Tuple
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from pre_commit import output
|
||||
from pre_commit.languages import helpers
|
||||
from pre_commit.xargs import xargs
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pre_commit.repository import Hook
|
||||
|
||||
ENVIRONMENT_DIR = None
|
||||
get_default_version = helpers.basic_get_default_version
|
||||
|
|
@ -16,18 +20,18 @@ healthy = helpers.basic_healthy
|
|||
install_environment = helpers.no_install
|
||||
|
||||
|
||||
def _process_filename_by_line(pattern, filename):
|
||||
def _process_filename_by_line(pattern: Pattern[bytes], filename: str) -> int:
|
||||
retv = 0
|
||||
with open(filename, 'rb') as f:
|
||||
for line_no, line in enumerate(f, start=1):
|
||||
if pattern.search(line):
|
||||
retv = 1
|
||||
output.write('{}:{}:'.format(filename, line_no))
|
||||
output.write(f'{filename}:{line_no}:')
|
||||
output.write_line(line.rstrip(b'\r\n'))
|
||||
return retv
|
||||
|
||||
|
||||
def _process_filename_at_once(pattern, filename):
|
||||
def _process_filename_at_once(pattern: Pattern[bytes], filename: str) -> int:
|
||||
retv = 0
|
||||
with open(filename, 'rb') as f:
|
||||
contents = f.read()
|
||||
|
|
@ -44,12 +48,16 @@ def _process_filename_at_once(pattern, filename):
|
|||
return retv
|
||||
|
||||
|
||||
def run_hook(hook, file_args, color):
|
||||
def run_hook(
|
||||
hook: 'Hook',
|
||||
file_args: Sequence[str],
|
||||
color: bool,
|
||||
) -> Tuple[int, bytes]:
|
||||
exe = (sys.executable, '-m', __name__) + tuple(hook.args) + (hook.entry,)
|
||||
return xargs(exe, file_args, color=color)
|
||||
|
||||
|
||||
def main(argv=None):
|
||||
def main(argv: Optional[Sequence[str]] = None) -> int:
|
||||
parser = argparse.ArgumentParser(
|
||||
description=(
|
||||
'grep-like finder using python regexes. Unlike grep, this tool '
|
||||
|
|
|
|||
|
|
@ -1,31 +1,41 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import contextlib
|
||||
import functools
|
||||
import os
|
||||
import sys
|
||||
from typing import Callable
|
||||
from typing import ContextManager
|
||||
from typing import Generator
|
||||
from typing import Optional
|
||||
from typing import Sequence
|
||||
from typing import Tuple
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit.envcontext import envcontext
|
||||
from pre_commit.envcontext import PatchesT
|
||||
from pre_commit.envcontext import UNSET
|
||||
from pre_commit.envcontext import Var
|
||||
from pre_commit.languages import helpers
|
||||
from pre_commit.parse_shebang import find_executable
|
||||
from pre_commit.prefix import Prefix
|
||||
from pre_commit.util import CalledProcessError
|
||||
from pre_commit.util import clean_path_on_failure
|
||||
from pre_commit.util import cmd_output
|
||||
from pre_commit.util import cmd_output_b
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pre_commit.repository import Hook
|
||||
|
||||
ENVIRONMENT_DIR = 'py_env'
|
||||
|
||||
|
||||
def bin_dir(venv):
|
||||
def bin_dir(venv: str) -> str:
|
||||
"""On windows there's a different directory for the virtualenv"""
|
||||
bin_part = 'Scripts' if os.name == 'nt' else 'bin'
|
||||
return os.path.join(venv, bin_part)
|
||||
|
||||
|
||||
def get_env_patch(venv):
|
||||
def get_env_patch(venv: str) -> PatchesT:
|
||||
return (
|
||||
('PYTHONHOME', UNSET),
|
||||
('VIRTUAL_ENV', venv),
|
||||
|
|
@ -33,7 +43,9 @@ def get_env_patch(venv):
|
|||
)
|
||||
|
||||
|
||||
def _find_by_py_launcher(version): # pragma: no cover (windows only)
|
||||
def _find_by_py_launcher(
|
||||
version: str,
|
||||
) -> Optional[str]: # pragma: no cover (windows only)
|
||||
if version.startswith('python'):
|
||||
try:
|
||||
return cmd_output(
|
||||
|
|
@ -42,14 +54,16 @@ def _find_by_py_launcher(version): # pragma: no cover (windows only)
|
|||
)[1].strip()
|
||||
except CalledProcessError:
|
||||
pass
|
||||
return None
|
||||
|
||||
|
||||
def _find_by_sys_executable():
|
||||
def _norm(path):
|
||||
def _find_by_sys_executable() -> Optional[str]:
|
||||
def _norm(path: str) -> Optional[str]:
|
||||
_, exe = os.path.split(path.lower())
|
||||
exe, _, _ = exe.partition('.exe')
|
||||
if find_executable(exe) and exe not in {'python', 'pythonw'}:
|
||||
return exe
|
||||
return None
|
||||
|
||||
# On linux, I see these common sys.executables:
|
||||
#
|
||||
|
|
@ -66,7 +80,8 @@ def _find_by_sys_executable():
|
|||
return None
|
||||
|
||||
|
||||
def _get_default_version(): # pragma: no cover (platform dependent)
|
||||
@functools.lru_cache(maxsize=1)
|
||||
def get_default_version() -> str: # pragma: no cover (platform dependent)
|
||||
# First attempt from `sys.executable` (or the realpath)
|
||||
exe = _find_by_sys_executable()
|
||||
if exe:
|
||||
|
|
@ -88,16 +103,7 @@ def _get_default_version(): # pragma: no cover (platform dependent)
|
|||
return C.DEFAULT
|
||||
|
||||
|
||||
def get_default_version():
|
||||
# TODO: when dropping python2, use `functools.lru_cache(maxsize=1)`
|
||||
try:
|
||||
return get_default_version.cached_version
|
||||
except AttributeError:
|
||||
get_default_version.cached_version = _get_default_version()
|
||||
return get_default_version()
|
||||
|
||||
|
||||
def _sys_executable_matches(version):
|
||||
def _sys_executable_matches(version: str) -> bool:
|
||||
if version == 'python':
|
||||
return True
|
||||
elif not version.startswith('python'):
|
||||
|
|
@ -111,7 +117,7 @@ def _sys_executable_matches(version):
|
|||
return sys.version_info[:len(info)] == info
|
||||
|
||||
|
||||
def norm_version(version):
|
||||
def norm_version(version: str) -> str:
|
||||
# first see if our current executable is appropriate
|
||||
if _sys_executable_matches(version):
|
||||
return sys.executable
|
||||
|
|
@ -135,14 +141,25 @@ def norm_version(version):
|
|||
return os.path.expanduser(version)
|
||||
|
||||
|
||||
def py_interface(_dir, _make_venv):
|
||||
def py_interface(
|
||||
_dir: str,
|
||||
_make_venv: Callable[[str, str], None],
|
||||
) -> Tuple[
|
||||
Callable[[Prefix, str], ContextManager[None]],
|
||||
Callable[[Prefix, str], bool],
|
||||
Callable[['Hook', Sequence[str], bool], Tuple[int, bytes]],
|
||||
Callable[[Prefix, str, Sequence[str]], None],
|
||||
]:
|
||||
@contextlib.contextmanager
|
||||
def in_env(prefix, language_version):
|
||||
def in_env(
|
||||
prefix: Prefix,
|
||||
language_version: str,
|
||||
) -> Generator[None, None, None]:
|
||||
envdir = prefix.path(helpers.environment_dir(_dir, language_version))
|
||||
with envcontext(get_env_patch(envdir)):
|
||||
yield
|
||||
|
||||
def healthy(prefix, language_version):
|
||||
def healthy(prefix: Prefix, language_version: str) -> bool:
|
||||
with in_env(prefix, language_version):
|
||||
retcode, _, _ = cmd_output_b(
|
||||
'python', '-c',
|
||||
|
|
@ -152,11 +169,19 @@ def py_interface(_dir, _make_venv):
|
|||
)
|
||||
return retcode == 0
|
||||
|
||||
def run_hook(hook, file_args, color):
|
||||
def run_hook(
|
||||
hook: 'Hook',
|
||||
file_args: Sequence[str],
|
||||
color: bool,
|
||||
) -> Tuple[int, bytes]:
|
||||
with in_env(hook.prefix, hook.language_version):
|
||||
return helpers.run_xargs(hook, hook.cmd, file_args, color=color)
|
||||
|
||||
def install_environment(prefix, version, additional_dependencies):
|
||||
def install_environment(
|
||||
prefix: Prefix,
|
||||
version: str,
|
||||
additional_dependencies: Sequence[str],
|
||||
) -> None:
|
||||
additional_dependencies = tuple(additional_dependencies)
|
||||
directory = helpers.environment_dir(_dir, version)
|
||||
|
||||
|
|
@ -175,7 +200,7 @@ def py_interface(_dir, _make_venv):
|
|||
return in_env, healthy, run_hook, install_environment
|
||||
|
||||
|
||||
def make_venv(envdir, python):
|
||||
def make_venv(envdir: str, python: str) -> None:
|
||||
env = dict(os.environ, VIRTUALENV_NO_DOWNLOAD='1')
|
||||
cmd = (sys.executable, '-mvirtualenv', envdir, '-p', python)
|
||||
cmd_output_b(*cmd, env=env, cwd='/')
|
||||
|
|
|
|||
|
|
@ -1,25 +1,15 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
from pre_commit.languages import python
|
||||
from pre_commit.util import CalledProcessError
|
||||
from pre_commit.util import cmd_output
|
||||
from pre_commit.util import cmd_output_b
|
||||
|
||||
|
||||
ENVIRONMENT_DIR = 'py_venv'
|
||||
get_default_version = python.get_default_version
|
||||
|
||||
|
||||
def get_default_version(): # pragma: no cover (version specific)
|
||||
if sys.version_info < (3,):
|
||||
return 'python3'
|
||||
else:
|
||||
return python.get_default_version()
|
||||
|
||||
|
||||
def orig_py_exe(exe): # pragma: no cover (platform specific)
|
||||
def orig_py_exe(exe: str) -> str: # pragma: no cover (platform specific)
|
||||
"""A -mvenv virtualenv made from a -mvirtualenv virtualenv installs
|
||||
packages to the incorrect location. Attempt to find the _original_ exe
|
||||
and invoke `-mvenv` from there.
|
||||
|
|
@ -48,7 +38,7 @@ def orig_py_exe(exe): # pragma: no cover (platform specific)
|
|||
return exe
|
||||
|
||||
|
||||
def make_venv(envdir, python):
|
||||
def make_venv(envdir: str, python: str) -> None:
|
||||
cmd_output_b(orig_py_exe(python), '-mvenv', envdir, cwd='/')
|
||||
|
||||
|
||||
@ -1,27 +1,35 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import contextlib
|
||||
import io
|
||||
import os.path
|
||||
import shutil
|
||||
import tarfile
|
||||
from typing import Generator
|
||||
from typing import Sequence
|
||||
from typing import Tuple
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit.envcontext import envcontext
|
||||
from pre_commit.envcontext import PatchesT
|
||||
from pre_commit.envcontext import Var
|
||||
from pre_commit.languages import helpers
|
||||
from pre_commit.prefix import Prefix
|
||||
from pre_commit.util import CalledProcessError
|
||||
from pre_commit.util import clean_path_on_failure
|
||||
from pre_commit.util import resource_bytesio
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pre_commit.repository import Hook
|
||||
|
||||
ENVIRONMENT_DIR = 'rbenv'
|
||||
get_default_version = helpers.basic_get_default_version
|
||||
healthy = helpers.basic_healthy
|
||||
|
||||
|
||||
def get_env_patch(venv, language_version): # pragma: windows no cover
|
||||
patches = (
|
||||
def get_env_patch(
|
||||
venv: str,
|
||||
language_version: str,
|
||||
) -> PatchesT: # pragma: windows no cover
|
||||
patches: PatchesT = (
|
||||
('GEM_HOME', os.path.join(venv, 'gems')),
|
||||
('RBENV_ROOT', venv),
|
||||
('BUNDLE_IGNORE_CONFIG', '1'),
|
||||
|
|
@ -38,8 +46,11 @@ def get_env_patch(venv, language_version): # pragma: windows no cover
|
|||
return patches
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def in_env(prefix, language_version): # pragma: windows no cover
|
||||
@contextlib.contextmanager # pragma: windows no cover
|
||||
def in_env(
|
||||
prefix: Prefix,
|
||||
language_version: str,
|
||||
) -> Generator[None, None, None]:
|
||||
envdir = prefix.path(
|
||||
helpers.environment_dir(ENVIRONMENT_DIR, language_version),
|
||||
)
|
||||
|
|
@ -47,13 +58,16 @@ def in_env(prefix, language_version): # pragma: windows no cover
|
|||
yield
|
||||
|
||||
|
||||
def _extract_resource(filename, dest):
|
||||
def _extract_resource(filename: str, dest: str) -> None:
|
||||
with resource_bytesio(filename) as bio:
|
||||
with tarfile.open(fileobj=bio) as tf:
|
||||
tf.extractall(dest)
|
||||
|
||||
|
||||
def _install_rbenv(prefix, version=C.DEFAULT): # pragma: windows no cover
|
||||
def _install_rbenv(
|
||||
prefix: Prefix,
|
||||
version: str = C.DEFAULT,
|
||||
) -> None: # pragma: windows no cover
|
||||
directory = helpers.environment_dir(ENVIRONMENT_DIR, version)
|
||||
|
||||
_extract_resource('rbenv.tar.gz', prefix.path('.'))
|
||||
|
|
@ -66,7 +80,7 @@ def _install_rbenv(prefix, version=C.DEFAULT): # pragma: windows no cover
|
|||
_extract_resource('ruby-build.tar.gz', plugins_dir)
|
||||
|
||||
activate_path = prefix.path(directory, 'bin', 'activate')
|
||||
with io.open(activate_path, 'w') as activate_file:
|
||||
with open(activate_path, 'w') as activate_file:
|
||||
# This is similar to how you would install rbenv to your home directory.
# However, we do a couple of things to expose the executables and to
# configure it to work in our directory.
|
|
@ -86,10 +100,13 @@ def _install_rbenv(prefix, version=C.DEFAULT): # pragma: windows no cover
|
|||
|
||||
# If we aren't using the system ruby, add a version here
|
||||
if version != C.DEFAULT:
|
||||
activate_file.write('export RBENV_VERSION="{}"\n'.format(version))
|
||||
activate_file.write(f'export RBENV_VERSION="{version}"\n')
|
||||
|
||||
|
||||
def _install_ruby(prefix, version): # pragma: windows no cover
|
||||
def _install_ruby(
|
||||
prefix: Prefix,
|
||||
version: str,
|
||||
) -> None: # pragma: windows no cover
|
||||
try:
|
||||
helpers.run_setup_cmd(prefix, ('rbenv', 'download', version))
|
||||
except CalledProcessError:  # pragma: no cover (usually fine with download)
|
||||
|
|
@ -98,8 +115,8 @@ def _install_ruby(prefix, version): # pragma: windows no cover
|
|||
|
||||
|
||||
def install_environment(
|
||||
prefix, version, additional_dependencies,
|
||||
): # pragma: windows no cover
|
||||
prefix: Prefix, version: str, additional_dependencies: Sequence[str],
|
||||
) -> None: # pragma: windows no cover
|
||||
additional_dependencies = tuple(additional_dependencies)
|
||||
directory = helpers.environment_dir(ENVIRONMENT_DIR, version)
|
||||
with clean_path_on_failure(prefix.path(directory)):
|
||||
|
|
@ -124,6 +141,10 @@ def install_environment(
|
|||
)
|
||||
|
||||
|
||||
def run_hook(hook, file_args, color): # pragma: windows no cover
|
||||
def run_hook(
|
||||
hook: 'Hook',
|
||||
file_args: Sequence[str],
|
||||
color: bool,
|
||||
) -> Tuple[int, bytes]: # pragma: windows no cover
|
||||
with in_env(hook.prefix, hook.language_version):
|
||||
return helpers.run_xargs(hook, hook.cmd, file_args, color=color)
|
||||
|
|
|
|||
|
|
@ -1,24 +1,31 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import contextlib
|
||||
import os.path
|
||||
from typing import Generator
|
||||
from typing import Sequence
|
||||
from typing import Set
|
||||
from typing import Tuple
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import toml
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit.envcontext import envcontext
|
||||
from pre_commit.envcontext import PatchesT
|
||||
from pre_commit.envcontext import Var
|
||||
from pre_commit.languages import helpers
|
||||
from pre_commit.prefix import Prefix
|
||||
from pre_commit.util import clean_path_on_failure
|
||||
from pre_commit.util import cmd_output_b
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pre_commit.repository import Hook
|
||||
|
||||
ENVIRONMENT_DIR = 'rustenv'
|
||||
get_default_version = helpers.basic_get_default_version
|
||||
healthy = helpers.basic_healthy
|
||||
|
||||
|
||||
def get_env_patch(target_dir):
|
||||
def get_env_patch(target_dir: str) -> PatchesT:
|
||||
return (
|
||||
(
|
||||
'PATH',
|
||||
|
|
@ -28,7 +35,7 @@ def get_env_patch(target_dir):
|
|||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def in_env(prefix):
|
||||
def in_env(prefix: Prefix) -> Generator[None, None, None]:
|
||||
target_dir = prefix.path(
|
||||
helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT),
|
||||
)
|
||||
|
|
@ -36,7 +43,10 @@ def in_env(prefix):
|
|||
yield
|
||||
|
||||
|
||||
def _add_dependencies(cargo_toml_path, additional_dependencies):
|
||||
def _add_dependencies(
|
||||
cargo_toml_path: str,
|
||||
additional_dependencies: Set[str],
|
||||
) -> None:
|
||||
with open(cargo_toml_path, 'r+') as f:
|
||||
cargo_toml = toml.load(f)
|
||||
cargo_toml.setdefault('dependencies', {})
|
||||
|
|
@ -48,7 +58,11 @@ def _add_dependencies(cargo_toml_path, additional_dependencies):
|
|||
f.truncate()
|
||||
|
||||
|
||||
def install_environment(prefix, version, additional_dependencies):
|
||||
def install_environment(
|
||||
prefix: Prefix,
|
||||
version: str,
|
||||
additional_dependencies: Sequence[str],
|
||||
) -> None:
|
||||
helpers.assert_version_default('rust', version)
|
||||
directory = prefix.path(
|
||||
helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT),
|
||||
|
|
@ -73,7 +87,7 @@ def install_environment(prefix, version, additional_dependencies):
|
|||
_add_dependencies(prefix.path('Cargo.toml'), lib_deps)
|
||||
|
||||
with clean_path_on_failure(directory):
|
||||
packages_to_install = {('--path', '.')}
|
||||
packages_to_install: Set[Tuple[str, ...]] = {('--path', '.')}
|
||||
for cli_dep in cli_deps:
|
||||
cli_dep = cli_dep[len('cli:'):]
|
||||
package, _, version = cli_dep.partition(':')
|
||||
|
|
@ -82,13 +96,17 @@ def install_environment(prefix, version, additional_dependencies):
|
|||
else:
|
||||
packages_to_install.add((package,))
|
||||
|
||||
for package in packages_to_install:
|
||||
for args in packages_to_install:
|
||||
cmd_output_b(
|
||||
'cargo', 'install', '--bins', '--root', directory, *package,
|
||||
cwd=prefix.prefix_dir
|
||||
'cargo', 'install', '--bins', '--root', directory, *args,
|
||||
cwd=prefix.prefix_dir,
|
||||
)
|
||||
|
||||
|
||||
def run_hook(hook, file_args, color):
|
||||
def run_hook(
|
||||
hook: 'Hook',
|
||||
file_args: Sequence[str],
|
||||
color: bool,
|
||||
) -> Tuple[int, bytes]:
|
||||
with in_env(hook.prefix):
|
||||
return helpers.run_xargs(hook, hook.cmd, file_args, color=color)
|
||||
|
|
|
|||
|
|
@ -1,7 +1,11 @@
|
|||
from __future__ import unicode_literals
|
||||
from typing import Sequence
|
||||
from typing import Tuple
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from pre_commit.languages import helpers
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pre_commit.repository import Hook
|
||||
|
||||
ENVIRONMENT_DIR = None
|
||||
get_default_version = helpers.basic_get_default_version
|
||||
|
|
@ -9,7 +13,11 @@ healthy = helpers.basic_healthy
|
|||
install_environment = helpers.no_install
|
||||
|
||||
|
||||
def run_hook(hook, file_args, color):
|
||||
def run_hook(
|
||||
hook: 'Hook',
|
||||
file_args: Sequence[str],
|
||||
color: bool,
|
||||
) -> Tuple[int, bytes]:
|
||||
cmd = hook.cmd
|
||||
cmd = (hook.prefix.path(cmd[0]),) + cmd[1:]
|
||||
return helpers.run_xargs(hook, cmd, file_args, color=color)
|
||||
|
|
|
|||
|
|
@ -1,15 +1,22 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
from typing import Generator
|
||||
from typing import Sequence
|
||||
from typing import Tuple
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit.envcontext import envcontext
|
||||
from pre_commit.envcontext import PatchesT
|
||||
from pre_commit.envcontext import Var
|
||||
from pre_commit.languages import helpers
|
||||
from pre_commit.prefix import Prefix
|
||||
from pre_commit.util import clean_path_on_failure
|
||||
from pre_commit.util import cmd_output_b
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pre_commit.repository import Hook
|
||||
|
||||
ENVIRONMENT_DIR = 'swift_env'
|
||||
get_default_version = helpers.basic_get_default_version
|
||||
healthy = helpers.basic_healthy
|
||||
|
|
@ -17,13 +24,13 @@ BUILD_DIR = '.build'
|
|||
BUILD_CONFIG = 'release'
|
||||
|
||||
|
||||
def get_env_patch(venv): # pragma: windows no cover
|
||||
def get_env_patch(venv: str) -> PatchesT: # pragma: windows no cover
|
||||
bin_path = os.path.join(venv, BUILD_DIR, BUILD_CONFIG)
|
||||
return (('PATH', (bin_path, os.pathsep, Var('PATH'))),)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def in_env(prefix): # pragma: windows no cover
|
||||
@contextlib.contextmanager # pragma: windows no cover
|
||||
def in_env(prefix: Prefix) -> Generator[None, None, None]:
|
||||
envdir = prefix.path(
|
||||
helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT),
|
||||
)
|
||||
|
|
@ -32,8 +39,8 @@ def in_env(prefix): # pragma: windows no cover
|
|||
|
||||
|
||||
def install_environment(
|
||||
prefix, version, additional_dependencies,
|
||||
): # pragma: windows no cover
|
||||
prefix: Prefix, version: str, additional_dependencies: Sequence[str],
|
||||
) -> None: # pragma: windows no cover
|
||||
helpers.assert_version_default('swift', version)
|
||||
helpers.assert_no_additional_deps('swift', additional_dependencies)
|
||||
directory = prefix.path(
|
||||
|
|
@ -51,6 +58,10 @@ def install_environment(
|
|||
)
|
||||
|
||||
|
||||
def run_hook(hook, file_args, color): # pragma: windows no cover
|
||||
def run_hook(
|
||||
hook: 'Hook',
|
||||
file_args: Sequence[str],
|
||||
color: bool,
|
||||
) -> Tuple[int, bytes]: # pragma: windows no cover
|
||||
with in_env(hook.prefix):
|
||||
return helpers.run_xargs(hook, hook.cmd, file_args, color=color)
|
||||
|
|
|
|||
|
|
@ -1,7 +1,12 @@
|
|||
from __future__ import unicode_literals
|
||||
from typing import Sequence
|
||||
from typing import Tuple
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from pre_commit.languages import helpers
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pre_commit.repository import Hook
|
||||
|
||||
|
||||
ENVIRONMENT_DIR = None
|
||||
get_default_version = helpers.basic_get_default_version
|
||||
|
|
@ -9,5 +14,9 @@ healthy = helpers.basic_healthy
|
|||
install_environment = helpers.no_install
|
||||
|
||||
|
||||
def run_hook(hook, file_args, color):
|
||||
def run_hook(
|
||||
hook: 'Hook',
|
||||
file_args: Sequence[str],
|
||||
color: bool,
|
||||
) -> Tuple[int, bytes]:
|
||||
return helpers.run_xargs(hook, hook.cmd, file_args, color=color)
|
||||
|
|
|
|||
|
|
@ -1,12 +1,10 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import contextlib
|
||||
import logging
|
||||
from typing import Generator
|
||||
|
||||
from pre_commit import color
|
||||
from pre_commit import output
|
||||
|
||||
|
||||
logger = logging.getLogger('pre_commit')
|
||||
|
||||
LOG_LEVEL_COLORS = {
|
||||
|
|
@ -18,15 +16,15 @@ LOG_LEVEL_COLORS = {
|
|||
|
||||
|
||||
class LoggingHandler(logging.Handler):
|
||||
def __init__(self, use_color):
|
||||
super(LoggingHandler, self).__init__()
|
||||
def __init__(self, use_color: bool) -> None:
|
||||
super().__init__()
|
||||
self.use_color = use_color
|
||||
|
||||
def emit(self, record):
|
||||
def emit(self, record: logging.LogRecord) -> None:
|
||||
output.write_line(
|
||||
'{} {}'.format(
|
||||
color.format_color(
|
||||
'[{}]'.format(record.levelname),
|
||||
f'[{record.levelname}]',
|
||||
LOG_LEVEL_COLORS[record.levelname],
|
||||
self.use_color,
|
||||
),
|
||||
|
|
@ -36,8 +34,8 @@ class LoggingHandler(logging.Handler):
|
|||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def logging_handler(*args, **kwargs):
|
||||
handler = LoggingHandler(*args, **kwargs)
|
||||
def logging_handler(use_color: bool) -> Generator[None, None, None]:
|
||||
handler = LoggingHandler(use_color)
|
||||
logger.addHandler(handler)
|
||||
logger.setLevel(logging.INFO)
|
||||
try:
|
||||
|
|
|
|||
|
|
@ -1,9 +1,11 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import argparse
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from typing import Any
|
||||
from typing import Optional
|
||||
from typing import Sequence
|
||||
from typing import Union
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit import color
|
||||
|
|
@ -39,7 +41,7 @@ os.environ.pop('__PYVENV_LAUNCHER__', None)
|
|||
COMMANDS_NO_GIT = {'clean', 'gc', 'init-templatedir', 'sample-config'}
|
||||
|
||||
|
||||
def _add_color_option(parser):
|
||||
def _add_color_option(parser: argparse.ArgumentParser) -> None:
|
||||
parser.add_argument(
|
||||
'--color', default=os.environ.get('PRE_COMMIT_COLOR', 'auto'),
|
||||
type=color.use_color,
|
||||
|
|
@ -48,7 +50,7 @@ def _add_color_option(parser):
|
|||
)
|
||||
|
||||
|
||||
def _add_config_option(parser):
|
||||
def _add_config_option(parser: argparse.ArgumentParser) -> None:
|
||||
parser.add_argument(
|
||||
'-c', '--config', default=C.CONFIG_FILE,
|
||||
help='Path to alternate config file',
|
||||
|
|
@ -56,18 +58,24 @@ def _add_config_option(parser):
|
|||
|
||||
|
||||
class AppendReplaceDefault(argparse.Action):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(AppendReplaceDefault, self).__init__(*args, **kwargs)
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
super().__init__(*args, **kwargs)
|
||||
self.appended = False
|
||||
|
||||
def __call__(self, parser, namespace, values, option_string=None):
|
||||
def __call__(
|
||||
self,
|
||||
parser: argparse.ArgumentParser,
|
||||
namespace: argparse.Namespace,
|
||||
values: Union[str, Sequence[str], None],
|
||||
option_string: Optional[str] = None,
|
||||
) -> None:
|
||||
if not self.appended:
|
||||
setattr(namespace, self.dest, [])
|
||||
self.appended = True
|
||||
getattr(namespace, self.dest).append(values)
|
||||
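This Action exists so that a user-supplied -t/--hook-type replaces the built-in default list rather than appending to it, while still letting the flag be repeated. A hedged, self-contained sketch of that behaviour (the option name and default values here are illustrative):

    import argparse

    class AppendReplaceDefault(argparse.Action):
        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self.appended = False

        def __call__(self, parser, namespace, values, option_string=None):
            if not self.appended:
                setattr(namespace, self.dest, [])  # first use discards the default
                self.appended = True
            getattr(namespace, self.dest).append(values)

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-t', dest='hook_types', action=AppendReplaceDefault,
        default=['pre-commit'],
    )
    assert parser.parse_args([]).hook_types == ['pre-commit']
    assert parser.parse_args(['-t', 'pre-push', '-t', 'commit-msg']).hook_types == [
        'pre-push', 'commit-msg',
    ]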
|
||||
|
||||
def _add_hook_type_option(parser):
|
||||
def _add_hook_type_option(parser: argparse.ArgumentParser) -> None:
|
||||
parser.add_argument(
|
||||
'-t', '--hook-type', choices=(
|
||||
'pre-commit', 'pre-merge-commit', 'pre-push',
|
||||
|
|
@ -79,7 +87,7 @@ def _add_hook_type_option(parser):
|
|||
)
|
||||
|
||||
|
||||
def _add_run_options(parser):
|
||||
def _add_run_options(parser: argparse.ArgumentParser) -> None:
|
||||
parser.add_argument('hook', nargs='?', help='A single hook-id to run')
|
||||
parser.add_argument('--verbose', '-v', action='store_true', default=False)
|
||||
parser.add_argument(
|
||||
|
|
@ -113,7 +121,7 @@ def _add_run_options(parser):
|
|||
)
|
||||
|
||||
|
||||
def _adjust_args_and_chdir(args):
|
||||
def _adjust_args_and_chdir(args: argparse.Namespace) -> None:
|
||||
# `--config` was specified relative to the non-root working directory
|
||||
if os.path.exists(args.config):
|
||||
args.config = os.path.abspath(args.config)
|
||||
|
|
@ -145,7 +153,7 @@ def _adjust_args_and_chdir(args):
|
|||
args.repo = os.path.relpath(args.repo)
|
||||
|
||||
|
||||
def main(argv=None):
|
||||
def main(argv: Optional[Sequence[str]] = None) -> int:
|
||||
argv = argv if argv is not None else sys.argv[1:]
|
||||
argv = [five.to_text(arg) for arg in argv]
|
||||
parser = argparse.ArgumentParser(prog='pre-commit')
|
||||
|
|
@ -154,7 +162,7 @@ def main(argv=None):
|
|||
parser.add_argument(
|
||||
'-V', '--version',
|
||||
action='version',
|
||||
version='%(prog)s {}'.format(C.VERSION),
|
||||
version=f'%(prog)s {C.VERSION}',
|
||||
)
|
||||
|
||||
subparsers = parser.add_subparsers(dest='command')
|
||||
|
|
@ -254,7 +262,7 @@ def main(argv=None):
|
|||
_add_run_options(run_parser)
|
||||
|
||||
sample_config_parser = subparsers.add_parser(
|
||||
'sample-config', help='Produce a sample {} file'.format(C.CONFIG_FILE),
|
||||
'sample-config', help=f'Produce a sample {C.CONFIG_FILE} file',
|
||||
)
|
||||
_add_color_option(sample_config_parser)
|
||||
_add_config_option(sample_config_parser)
|
||||
|
|
@ -345,11 +353,11 @@ def main(argv=None):
|
|||
return uninstall(hook_types=args.hook_types)
|
||||
else:
|
||||
raise NotImplementedError(
|
||||
'Command {} not implemented.'.format(args.command),
|
||||
f'Command {args.command} not implemented.',
|
||||
)
|
||||
|
||||
raise AssertionError(
|
||||
'Command {} failed to exit with a returncode'.format(args.command),
|
||||
f'Command {args.command} failed to exit with a returncode',
|
||||
)
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,10 +1,8 @@
|
|||
from __future__ import absolute_import
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import argparse
|
||||
import os.path
|
||||
import tarfile
|
||||
from typing import Optional
|
||||
from typing import Sequence
|
||||
|
||||
from pre_commit import output
|
||||
from pre_commit.util import cmd_output_b
|
||||
|
|
@ -27,7 +25,7 @@ REPOS = (
|
|||
)
|
||||
|
||||
|
||||
def make_archive(name, repo, ref, destdir):
|
||||
def make_archive(name: str, repo: str, ref: str, destdir: str) -> str:
|
||||
"""Makes an archive of a repository in the given destdir.
|
||||
|
||||
:param text name: Name to give the archive. For instance foo. The file
|
||||
|
|
@ -53,15 +51,16 @@ def make_archive(name, repo, ref, destdir):
|
|||
return output_path
|
||||
|
||||
|
||||
def main(argv=None):
|
||||
def main(argv: Optional[Sequence[str]] = None) -> int:
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('--dest', default='pre_commit/resources')
|
||||
args = parser.parse_args(argv)
|
||||
for archive_name, repo, ref in REPOS:
|
||||
output.write_line(
|
||||
'Making {}.tar.gz for {}@{}'.format(archive_name, repo, ref),
|
||||
f'Making {archive_name}.tar.gz for {repo}@{ref}',
|
||||
)
|
||||
make_archive(archive_name, repo, ref, args.dest)
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
|
|
|||
|
|
@ -1,4 +1,6 @@
|
|||
import argparse
|
||||
from typing import Optional
|
||||
from typing import Sequence
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit import git
|
||||
|
|
@ -8,7 +10,7 @@ from pre_commit.repository import all_hooks
|
|||
from pre_commit.store import Store
|
||||
|
||||
|
||||
def check_all_hooks_match_files(config_file):
|
||||
def check_all_hooks_match_files(config_file: str) -> int:
|
||||
classifier = Classifier(git.get_all_files())
|
||||
retv = 0
|
||||
|
||||
|
|
@ -16,13 +18,13 @@ def check_all_hooks_match_files(config_file):
|
|||
if hook.always_run or hook.language == 'fail':
|
||||
continue
|
||||
elif not classifier.filenames_for_hook(hook):
|
||||
print('{} does not apply to this repository'.format(hook.id))
|
||||
print(f'{hook.id} does not apply to this repository')
|
||||
retv = 1
|
||||
|
||||
return retv
|
||||
|
||||
|
||||
def main(argv=None):
|
||||
def main(argv: Optional[Sequence[str]] = None) -> int:
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('filenames', nargs='*', default=[C.CONFIG_FILE])
|
||||
args = parser.parse_args(argv)
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import re
|
||||
from typing import Optional
|
||||
from typing import Sequence
|
||||
|
||||
from cfgv import apply_defaults
|
||||
|
||||
|
|
@ -12,7 +12,11 @@ from pre_commit.clientlib import MANIFEST_HOOK_DICT
|
|||
from pre_commit.commands.run import Classifier
|
||||
|
||||
|
||||
def exclude_matches_any(filenames, include, exclude):
|
||||
def exclude_matches_any(
|
||||
filenames: Sequence[str],
|
||||
include: str,
|
||||
exclude: str,
|
||||
) -> bool:
|
||||
if exclude == '^$':
|
||||
return True
|
||||
include_re, exclude_re = re.compile(include), re.compile(exclude)
|
||||
|
|
@ -22,7 +26,7 @@ def exclude_matches_any(filenames, include, exclude):
|
|||
return False
|
||||
|
||||
|
||||
def check_useless_excludes(config_file):
|
||||
def check_useless_excludes(config_file: str) -> int:
|
||||
config = load_config(config_file)
|
||||
classifier = Classifier(git.get_all_files())
|
||||
retv = 0
|
||||
|
|
@ -54,7 +58,7 @@ def check_useless_excludes(config_file):
|
|||
return retv
|
||||
|
||||
|
||||
def main(argv=None):
|
||||
def main(argv: Optional[Sequence[str]] = None) -> int:
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('filenames', nargs='*', default=[C.CONFIG_FILE])
|
||||
args = parser.parse_args(argv)
|
||||
|
|
|
|||
|
|
@ -1,12 +1,15 @@
|
|||
import sys
|
||||
from typing import Optional
|
||||
from typing import Sequence
|
||||
|
||||
from pre_commit import output
|
||||
|
||||
|
||||
def main(argv=None):
|
||||
def main(argv: Optional[Sequence[str]] = None) -> int:
|
||||
argv = argv if argv is not None else sys.argv[1:]
|
||||
for arg in argv:
|
||||
output.write_line(arg)
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
|
|
|||
|
|
@ -1,21 +1,22 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import contextlib
|
||||
import sys
|
||||
from typing import IO
|
||||
from typing import Optional
|
||||
from typing import Union
|
||||
|
||||
from pre_commit import color
|
||||
from pre_commit import five
|
||||
from pre_commit.util import noop_context
|
||||
|
||||
|
||||
def get_hook_message(
|
||||
start,
|
||||
postfix='',
|
||||
end_msg=None,
|
||||
end_len=0,
|
||||
end_color=None,
|
||||
use_color=None,
|
||||
cols=80,
|
||||
):
|
||||
start: str,
|
||||
postfix: str = '',
|
||||
end_msg: Optional[str] = None,
|
||||
end_len: int = 0,
|
||||
end_color: Optional[str] = None,
|
||||
use_color: Optional[bool] = None,
|
||||
cols: int = 80,
|
||||
) -> str:
|
||||
"""Prints a message for running a hook.
|
||||
|
||||
This currently supports three approaches:
|
||||
|
|
@ -46,16 +47,13 @@ def get_hook_message(
|
|||
)
|
||||
start...........................................................postfix end
|
||||
"""
|
||||
if bool(end_msg) == bool(end_len):
|
||||
raise ValueError('Expected one of (`end_msg`, `end_len`)')
|
||||
if end_msg is not None and (end_color is None or use_color is None):
|
||||
raise ValueError(
|
||||
'`end_color` and `use_color` are required with `end_msg`',
|
||||
)
|
||||
|
||||
if end_len:
|
||||
assert end_msg is None, end_msg
|
||||
return start + '.' * (cols - len(start) - end_len - 1)
|
||||
else:
|
||||
assert end_msg is not None
|
||||
assert end_color is not None
|
||||
assert use_color is not None
|
||||
return '{}{}{}{}\n'.format(
|
||||
start,
|
||||
'.' * (cols - len(start) - len(postfix) - len(end_msg) - 1),
|
||||
|
|
@ -64,23 +62,22 @@ def get_hook_message(
|
|||
)
|
||||
|
||||
|
||||
stdout_byte_stream = getattr(sys.stdout, 'buffer', sys.stdout)
|
||||
|
||||
|
||||
def write(s, stream=stdout_byte_stream):
|
||||
def write(s: str, stream: IO[bytes] = sys.stdout.buffer) -> None:
|
||||
stream.write(five.to_bytes(s))
|
||||
stream.flush()
|
||||
|
||||
|
||||
def write_line(s=None, stream=stdout_byte_stream, logfile_name=None):
|
||||
output_streams = [stream]
|
||||
if logfile_name:
|
||||
ctx = open(logfile_name, 'ab')
|
||||
output_streams.append(ctx)
|
||||
else:
|
||||
ctx = noop_context()
|
||||
def write_line(
|
||||
s: Union[None, str, bytes] = None,
|
||||
stream: IO[bytes] = sys.stdout.buffer,
|
||||
logfile_name: Optional[str] = None,
|
||||
) -> None:
|
||||
with contextlib.ExitStack() as exit_stack:
|
||||
output_streams = [stream]
|
||||
if logfile_name:
|
||||
stream = exit_stack.enter_context(open(logfile_name, 'ab'))
|
||||
output_streams.append(stream)
|
||||
|
||||
with ctx:
|
||||
for output_stream in output_streams:
|
||||
if s is not None:
|
||||
output_stream.write(five.to_bytes(s))
|
||||
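The rewrite above replaces the old noop_context()/open() juggling with contextlib.ExitStack, which opens the logfile only when one was requested and guarantees it is closed on exit. The general pattern, reduced to a standalone sketch (the function name and filename are illustrative, not from the diff):

    import contextlib
    import sys
    from typing import Optional

    def write_line_to(s: bytes, logfile_name: Optional[str] = None) -> None:
        with contextlib.ExitStack() as exit_stack:
            streams = [sys.stdout.buffer]
            if logfile_name:
                # Entered into the stack, so it is closed automatically on exit.
                streams.append(exit_stack.enter_context(open(logfile_name, 'ab')))
            for stream in streams:
                stream.write(s + b'\n')
                stream.flush()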
|
|
|
|||
|
|
@ -1,24 +1,28 @@
|
|||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import os.path
|
||||
from typing import Mapping
|
||||
from typing import NoReturn
|
||||
from typing import Optional
|
||||
from typing import Tuple
|
||||
|
||||
from identify.identify import parse_shebang_from_file
|
||||
|
||||
|
||||
class ExecutableNotFoundError(OSError):
|
||||
def to_output(self):
|
||||
return (1, self.args[0].encode('UTF-8'), b'')
|
||||
def to_output(self) -> Tuple[int, bytes, None]:
|
||||
return (1, self.args[0].encode('UTF-8'), None)
|
||||
|
||||
|
||||
def parse_filename(filename):
|
||||
def parse_filename(filename: str) -> Tuple[str, ...]:
|
||||
if not os.path.exists(filename):
|
||||
return ()
|
||||
else:
|
||||
return parse_shebang_from_file(filename)
|
||||
|
||||
|
||||
def find_executable(exe, _environ=None):
|
||||
def find_executable(
|
||||
exe: str,
|
||||
_environ: Optional[Mapping[str, str]] = None,
|
||||
) -> Optional[str]:
|
||||
exe = os.path.normpath(exe)
|
||||
if os.sep in exe:
|
||||
return exe
|
||||
|
|
@ -42,9 +46,9 @@ def find_executable(exe, _environ=None):
|
|||
return None
|
||||
|
||||
|
||||
def normexe(orig):
|
||||
def _error(msg):
|
||||
raise ExecutableNotFoundError('Executable `{}` {}'.format(orig, msg))
|
||||
def normexe(orig: str) -> str:
|
||||
def _error(msg: str) -> NoReturn:
|
||||
raise ExecutableNotFoundError(f'Executable `{orig}` {msg}')
|
||||
|
||||
if os.sep not in orig and (not os.altsep or os.altsep not in orig):
|
||||
exe = find_executable(orig)
|
||||
|
|
@ -61,7 +65,7 @@ def normexe(orig):
|
|||
return orig
|
||||
|
||||
|
||||
def normalize_cmd(cmd):
|
||||
def normalize_cmd(cmd: Tuple[str, ...]) -> Tuple[str, ...]:
|
||||
"""Fixes for the following issues on windows
|
||||
- https://bugs.python.org/issue8557
|
||||
- windows does not parse shebangs
|
||||
|
|
|
|||
|
|
@ -1,18 +1,17 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import collections
|
||||
import os.path
|
||||
from typing import NamedTuple
|
||||
from typing import Tuple
|
||||
|
||||
|
||||
class Prefix(collections.namedtuple('Prefix', ('prefix_dir',))):
    __slots__ = ()
class Prefix(NamedTuple):
    prefix_dir: str

    def path(self, *parts):
    def path(self, *parts: str) -> str:
        return os.path.normpath(os.path.join(self.prefix_dir, *parts))

    def exists(self, *parts):
    def exists(self, *parts: str) -> bool:
        return os.path.exists(self.path(*parts))

    def star(self, end):
    def star(self, end: str) -> Tuple[str, ...]:
        paths = os.listdir(self.prefix_dir)
        return tuple(path for path in paths if path.endswith(end))
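Because Prefix is now a typed NamedTuple it stays immutable and hashable (it is reused as part of Hook.install_key further down in this diff) while giving mypy the field type. A short usage sketch with an illustrative directory:

    from pre_commit.prefix import Prefix

    prefix = Prefix('/tmp/hook-repo-checkout')   # illustrative path
    prefix.path('py_env-python3', 'bin')         # '/tmp/hook-repo-checkout/py_env-python3/bin'
    prefix.exists('setup.py')                    # True or False, depending on the checkout
    prefix.star('.txt')                          # entries in the checkout root ending with .txt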
|
|
|||
|
|
@@ -1,11 +1,15 @@
from __future__ import unicode_literals

import collections
import io
import json
import logging
import os
import shlex
from typing import Any
from typing import Dict
from typing import List
from typing import NamedTuple
from typing import Optional
from typing import Sequence
from typing import Set
from typing import Tuple

import pre_commit.constants as C
from pre_commit import five

@@ -16,6 +20,7 @@ from pre_commit.clientlib import META
from pre_commit.languages.all import languages
from pre_commit.languages.helpers import environment_dir
from pre_commit.prefix import Prefix
from pre_commit.store import Store
from pre_commit.util import parse_version
from pre_commit.util import rmtree

@@ -23,27 +28,27 @@ from pre_commit.util import rmtree
logger = logging.getLogger('pre_commit')


def _state(additional_deps):
def _state(additional_deps: Sequence[str]) -> object:
    return {'additional_dependencies': sorted(additional_deps)}


def _state_filename(prefix, venv):
def _state_filename(prefix: Prefix, venv: str) -> str:
    return prefix.path(venv, '.install_state_v' + C.INSTALLED_STATE_VERSION)


def _read_state(prefix, venv):
def _read_state(prefix: Prefix, venv: str) -> Optional[object]:
    filename = _state_filename(prefix, venv)
    if not os.path.exists(filename):
        return None
    else:
        with io.open(filename) as f:
        with open(filename) as f:
            return json.load(f)


def _write_state(prefix, venv, state):
def _write_state(prefix: Prefix, venv: str, state: object) -> None:
    state_filename = _state_filename(prefix, venv)
    staging = state_filename + 'staging'
    with io.open(staging, 'w') as state_file:
    with open(staging, 'w') as state_file:
        state_file.write(five.to_text(json.dumps(state)))
    # Move the file into place atomically to indicate we've installed
    os.rename(staging, state_filename)

@@ -52,15 +57,36 @@ def _write_state(prefix, venv, state):
_KEYS = tuple(item.key for item in MANIFEST_HOOK_DICT.items)


class Hook(collections.namedtuple('Hook', ('src', 'prefix') + _KEYS)):
    __slots__ = ()
class Hook(NamedTuple):
    src: str
    prefix: Prefix
    id: str
    name: str
    entry: str
    language: str
    alias: str
    files: str
    exclude: str
    types: Sequence[str]
    exclude_types: Sequence[str]
    additional_dependencies: Sequence[str]
    args: Sequence[str]
    always_run: bool
    pass_filenames: bool
    description: str
    language_version: str
    log_file: str
    minimum_pre_commit_version: str
    require_serial: bool
    stages: Sequence[str]
    verbose: bool

    @property
    def cmd(self):
    def cmd(self) -> Tuple[str, ...]:
        return tuple(shlex.split(self.entry)) + tuple(self.args)

    @property
    def install_key(self):
    def install_key(self) -> Tuple[Prefix, str, str, Tuple[str, ...]]:
        return (
            self.prefix,
            self.language,

@@ -68,7 +94,7 @@ class Hook(collections.namedtuple('Hook', ('src', 'prefix') + _KEYS)):
            tuple(self.additional_dependencies),
        )

    def installed(self):
    def installed(self) -> bool:
        lang = languages[self.language]
        venv = environment_dir(lang.ENVIRONMENT_DIR, self.language_version)
        return (

@@ -81,12 +107,13 @@ class Hook(collections.namedtuple('Hook', ('src', 'prefix') + _KEYS)):
            )
        )

    def install(self):
        logger.info('Installing environment for {}.'.format(self.src))
    def install(self) -> None:
        logger.info(f'Installing environment for {self.src}.')
        logger.info('Once installed this environment will be reused.')
        logger.info('This may take a few minutes...')

        lang = languages[self.language]
        assert lang.ENVIRONMENT_DIR is not None
        venv = environment_dir(lang.ENVIRONMENT_DIR, self.language_version)

        # There's potentially incomplete cleanup from previous runs

@@ -100,12 +127,12 @@ class Hook(collections.namedtuple('Hook', ('src', 'prefix') + _KEYS)):
        # Write our state to indicate we're installed
        _write_state(self.prefix, venv, _state(self.additional_dependencies))

    def run(self, file_args, color):
    def run(self, file_args: Sequence[str], color: bool) -> Tuple[int, bytes]:
        lang = languages[self.language]
        return lang.run_hook(self, file_args, color)

    @classmethod
    def create(cls, src, prefix, dct):
    def create(cls, src: str, prefix: Prefix, dct: Dict[str, Any]) -> 'Hook':
        # TODO: have cfgv do this (?)
        extra_keys = set(dct) - set(_KEYS)
        if extra_keys:

@@ -116,9 +143,10 @@ class Hook(collections.namedtuple('Hook', ('src', 'prefix') + _KEYS)):
        return cls(src=src, prefix=prefix, **{k: dct[k] for k in _KEYS})


def _hook(*hook_dicts, **kwargs):
    root_config = kwargs.pop('root_config')
    assert not kwargs, kwargs
def _hook(
        *hook_dicts: Dict[str, Any],
        root_config: Dict[str, Any],
) -> Dict[str, Any]:
    ret, rest = dict(hook_dicts[0]), hook_dicts[1:]
    for dct in rest:
        ret.update(dct)

@@ -146,8 +174,12 @@ def _hook(*hook_dicts, **kwargs):
    return ret


def _non_cloned_repository_hooks(repo_config, store, root_config):
    def _prefix(language_name, deps):
def _non_cloned_repository_hooks(
        repo_config: Dict[str, Any],
        store: Store,
        root_config: Dict[str, Any],
) -> Tuple[Hook, ...]:
    def _prefix(language_name: str, deps: Sequence[str]) -> Prefix:
        language = languages[language_name]
        # pygrep / script / system / docker_image do not have
        # environments so they work out of the current directory

@@ -166,7 +198,11 @@ def _non_cloned_repository_hooks(repo_config, store, root_config):
    )


def _cloned_repository_hooks(repo_config, store, root_config):
def _cloned_repository_hooks(
        repo_config: Dict[str, Any],
        store: Store,
        root_config: Dict[str, Any],
) -> Tuple[Hook, ...]:
    repo, rev = repo_config['repo'], repo_config['rev']
    manifest_path = os.path.join(store.clone(repo, rev), C.MANIFEST_FILE)
    by_id = {hook['id']: hook for hook in load_manifest(manifest_path)}

@@ -195,16 +231,20 @@ def _cloned_repository_hooks(repo_config, store, root_config):
    )


def _repository_hooks(repo_config, store, root_config):
def _repository_hooks(
        repo_config: Dict[str, Any],
        store: Store,
        root_config: Dict[str, Any],
) -> Tuple[Hook, ...]:
    if repo_config['repo'] in {LOCAL, META}:
        return _non_cloned_repository_hooks(repo_config, store, root_config)
    else:
        return _cloned_repository_hooks(repo_config, store, root_config)


def install_hook_envs(hooks, store):
    def _need_installed():
        seen = set()
def install_hook_envs(hooks: Sequence[Hook], store: Store) -> None:
    def _need_installed() -> List[Hook]:
        seen: Set[Tuple[Prefix, str, str, Tuple[str, ...]]] = set()
        ret = []
        for hook in hooks:
            if hook.install_key not in seen and not hook.installed():

@@ -220,7 +260,7 @@ def install_hook_envs(hooks, store):
            hook.install()


def all_hooks(root_config, store):
def all_hooks(root_config: Dict[str, Any], store: Store) -> Tuple[Hook, ...]:
    return tuple(
        hook
        for repo in root_config['repos']
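For context, `install_hook_envs` above skips hooks whose `install_key` has already been seen or is already installed; a simplified sketch of that dedup idea using plain tuples (hypothetical keys, not the real `Hook` objects):

from typing import List, Sequence, Set, Tuple

Key = Tuple[str, str, Tuple[str, ...]]  # e.g. (prefix_dir, language, deps)


def need_installed(keys: Sequence[Key]) -> List[Key]:
    seen: Set[Key] = set()
    ret = []
    for key in keys:
        if key not in seen:
            ret.append(key)
        seen.add(key)
    return ret


# two hooks sharing an environment trigger only one install
assert need_installed(
    [('/x', 'python', ()), ('/x', 'python', ()), ('/y', 'node', ())],
) == [('/x', 'python', ()), ('/y', 'node', ())]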
@ -1,11 +1,12 @@
|
|||
#!/usr/bin/env python3
|
||||
"""File generated by pre-commit: https://pre-commit.com"""
|
||||
from __future__ import print_function
|
||||
|
||||
import distutils.spawn
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from typing import Callable
|
||||
from typing import Dict
|
||||
from typing import Tuple
|
||||
|
||||
# work around https://github.com/Homebrew/homebrew-core/issues/30445
|
||||
os.environ.pop('__PYVENV_LAUNCHER__', None)
|
||||
|
|
@ -14,10 +15,10 @@ HERE = os.path.dirname(os.path.abspath(__file__))
|
|||
Z40 = '0' * 40
|
||||
ID_HASH = '138fd403232d2ddd5efb44317e38bf03'
|
||||
# start templated
|
||||
CONFIG = None
|
||||
HOOK_TYPE = None
|
||||
INSTALL_PYTHON = None
|
||||
SKIP_ON_MISSING_CONFIG = None
|
||||
CONFIG = ''
|
||||
HOOK_TYPE = ''
|
||||
INSTALL_PYTHON = ''
|
||||
SKIP_ON_MISSING_CONFIG = False
|
||||
# end templated
|
||||
|
||||
|
||||
|
|
@ -29,7 +30,7 @@ class FatalError(RuntimeError):
|
|||
pass
|
||||
|
||||
|
||||
def _norm_exe(exe):
|
||||
def _norm_exe(exe: str) -> Tuple[str, ...]:
|
||||
"""Necessary for shebang support on windows.
|
||||
|
||||
roughly lifted from `identify.identify.parse_shebang`
|
||||
|
|
@ -48,7 +49,7 @@ def _norm_exe(exe):
|
|||
return tuple(cmd)
|
||||
|
||||
|
||||
def _run_legacy():
|
||||
def _run_legacy() -> Tuple[int, bytes]:
|
||||
if __file__.endswith('.legacy'):
|
||||
raise SystemExit(
|
||||
"bug: pre-commit's script is installed in migration mode\n"
|
||||
|
|
@ -60,11 +61,11 @@ def _run_legacy():
|
|||
)
|
||||
|
||||
if HOOK_TYPE == 'pre-push':
|
||||
stdin = getattr(sys.stdin, 'buffer', sys.stdin).read()
|
||||
stdin = sys.stdin.buffer.read()
|
||||
else:
|
||||
stdin = None
|
||||
stdin = b''
|
||||
|
||||
legacy_hook = os.path.join(HERE, '{}.legacy'.format(HOOK_TYPE))
|
||||
legacy_hook = os.path.join(HERE, f'{HOOK_TYPE}.legacy')
|
||||
if os.access(legacy_hook, os.X_OK):
|
||||
cmd = _norm_exe(legacy_hook) + (legacy_hook,) + tuple(sys.argv[1:])
|
||||
proc = subprocess.Popen(cmd, stdin=subprocess.PIPE if stdin else None)
|
||||
|
|
@ -74,7 +75,7 @@ def _run_legacy():
|
|||
return 0, stdin
|
||||
|
||||
|
||||
def _validate_config():
|
||||
def _validate_config() -> None:
|
||||
cmd = ('git', 'rev-parse', '--show-toplevel')
|
||||
top_level = subprocess.check_output(cmd).decode('UTF-8').strip()
|
||||
cfg = os.path.join(top_level, CONFIG)
|
||||
|
|
@ -98,7 +99,7 @@ def _validate_config():
|
|||
)
|
||||
|
||||
|
||||
def _exe():
|
||||
def _exe() -> Tuple[str, ...]:
|
||||
with open(os.devnull, 'wb') as devnull:
|
||||
for exe in (INSTALL_PYTHON, sys.executable):
|
||||
try:
|
||||
|
|
@ -118,14 +119,14 @@ def _exe():
|
|||
)
|
||||
|
||||
|
||||
def _rev_exists(rev):
|
||||
def _rev_exists(rev: str) -> bool:
|
||||
return not subprocess.call(('git', 'rev-list', '--quiet', rev))
|
||||
|
||||
|
||||
def _pre_push(stdin):
|
||||
def _pre_push(stdin: bytes) -> Tuple[str, ...]:
|
||||
remote = sys.argv[1]
|
||||
|
||||
opts = ()
|
||||
opts: Tuple[str, ...] = ()
|
||||
for line in stdin.decode('UTF-8').splitlines():
|
||||
_, local_sha, _, remote_sha = line.split()
|
||||
if local_sha == Z40:
|
||||
|
|
@ -136,7 +137,7 @@ def _pre_push(stdin):
|
|||
# ancestors not found in remote
|
||||
ancestors = subprocess.check_output((
|
||||
'git', 'rev-list', local_sha, '--topo-order', '--reverse',
|
||||
'--not', '--remotes={}'.format(remote),
|
||||
'--not', f'--remotes={remote}',
|
||||
)).decode().strip()
|
||||
if not ancestors:
|
||||
continue
|
||||
|
|
@ -148,8 +149,8 @@ def _pre_push(stdin):
|
|||
# pushing the whole tree including root commit
|
||||
opts = ('--all-files',)
|
||||
else:
|
||||
cmd = ('git', 'rev-parse', '{}^'.format(first_ancestor))
|
||||
source = subprocess.check_output(cmd).decode().strip()
|
||||
rev_cmd = ('git', 'rev-parse', f'{first_ancestor}^')
|
||||
source = subprocess.check_output(rev_cmd).decode().strip()
|
||||
opts = ('--origin', local_sha, '--source', source)
|
||||
|
||||
if opts:
|
||||
|
|
@ -159,8 +160,8 @@ def _pre_push(stdin):
|
|||
raise EarlyExit()
|
||||
|
||||
|
||||
def _opts(stdin):
|
||||
fns = {
|
||||
def _opts(stdin: bytes) -> Tuple[str, ...]:
|
||||
fns: Dict[str, Callable[[bytes], Tuple[str, ...]]] = {
|
||||
'prepare-commit-msg': lambda _: ('--commit-msg-filename', sys.argv[1]),
|
||||
'commit-msg': lambda _: ('--commit-msg-filename', sys.argv[1]),
|
||||
'pre-merge-commit': lambda _: (),
|
||||
|
|
@ -172,13 +173,14 @@ def _opts(stdin):
|
|||
|
||||
|
||||
if sys.version_info < (3, 7): # https://bugs.python.org/issue25942
|
||||
def _subprocess_call(cmd): # this is the python 2.7 implementation
|
||||
# this is the python 2.7 implementation
|
||||
def _subprocess_call(cmd: Tuple[str, ...]) -> int:
|
||||
return subprocess.Popen(cmd).wait()
|
||||
else:
|
||||
_subprocess_call = subprocess.call
|
||||
|
||||
|
||||
def main():
|
||||
def main() -> int:
|
||||
retv, stdin = _run_legacy()
|
||||
try:
|
||||
_validate_config()
|
||||
|
|
|
|||
|
|
@@ -1,10 +1,8 @@
from __future__ import unicode_literals

import contextlib
import io
import logging
import os.path
import time
from typing import Generator

from pre_commit import git
from pre_commit.util import CalledProcessError

@@ -17,7 +15,7 @@ from pre_commit.xargs import xargs
logger = logging.getLogger('pre_commit')


def _git_apply(patch):
def _git_apply(patch: str) -> None:
    args = ('apply', '--whitespace=nowarn', patch)
    try:
        cmd_output_b('git', *args)

@@ -27,7 +25,7 @@ def _git_apply(patch):


@contextlib.contextmanager
def _intent_to_add_cleared():
def _intent_to_add_cleared() -> Generator[None, None, None]:
    intent_to_add = git.intent_to_add_files()
    if intent_to_add:
        logger.warning('Unstaged intent-to-add files detected.')

@@ -42,7 +40,7 @@ def _intent_to_add_cleared():


@contextlib.contextmanager
def _unstaged_changes_cleared(patch_dir):
def _unstaged_changes_cleared(patch_dir: str) -> Generator[None, None, None]:
    tree = cmd_output('git', 'write-tree')[1].strip()
    retcode, diff_stdout_binary, _ = cmd_output_b(
        'git', 'diff-index', '--ignore-submodules', '--binary',

@@ -54,11 +52,11 @@ def _unstaged_changes_cleared(patch_dir):
        patch_filename = os.path.join(patch_dir, patch_filename)
        logger.warning('Unstaged files detected.')
        logger.info(
            'Stashing unstaged files to {}.'.format(patch_filename),
            f'Stashing unstaged files to {patch_filename}.',
        )
        # Save the current unstaged changes as a patch
        mkdirp(patch_dir)
        with io.open(patch_filename, 'wb') as patch_file:
        with open(patch_filename, 'wb') as patch_file:
            patch_file.write(diff_stdout_binary)

        # Clear the working directory of unstaged changes

@@ -79,7 +77,7 @@ def _unstaged_changes_cleared(patch_dir):
            # Roll back the changes made by hooks.
            cmd_output_b('git', 'checkout', '--', '.')
            _git_apply(patch_filename)
            logger.info('Restored changes from {}.'.format(patch_filename))
            logger.info(f'Restored changes from {patch_filename}.')
    else:
        # There weren't any staged files so we don't need to do anything
        # special

@@ -87,7 +85,7 @@ def _unstaged_changes_cleared(patch_dir):


@contextlib.contextmanager
def staged_files_only(patch_dir):
def staged_files_only(patch_dir: str) -> Generator[None, None, None]:
    """Clear any unstaged changes from the git working directory inside this
    context.
    """
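The context managers above gain `Generator[None, None, None]` return annotations: a generator that yields `None`, is sent nothing, and returns nothing. A minimal sketch of annotating an `@contextlib.contextmanager` this way (illustrative, not from this commit):

import contextlib
import logging
from typing import Generator


@contextlib.contextmanager
def quieter(logger_name: str) -> Generator[None, None, None]:
    logger = logging.getLogger(logger_name)
    old_level = logger.level
    logger.setLevel(logging.ERROR)
    try:
        yield  # nothing useful to hand back to the `with` block
    finally:
        logger.setLevel(old_level)


with quieter('pre_commit'):
    pass  # noisy work would go here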
@ -1,11 +1,14 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import contextlib
|
||||
import io
|
||||
import logging
|
||||
import os.path
|
||||
import sqlite3
|
||||
import tempfile
|
||||
from typing import Callable
|
||||
from typing import Generator
|
||||
from typing import List
|
||||
from typing import Optional
|
||||
from typing import Sequence
|
||||
from typing import Tuple
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit import file_lock
|
||||
|
|
@ -21,7 +24,7 @@ from pre_commit.util import rmtree
|
|||
logger = logging.getLogger('pre_commit')
|
||||
|
||||
|
||||
def _get_default_directory():
|
||||
def _get_default_directory() -> str:
|
||||
"""Returns the default directory for the Store. This is intentionally
|
||||
underscored to indicate that `Store.get_default_directory` is the intended
|
||||
way to get this information. This is also done so
|
||||
|
|
@ -34,16 +37,16 @@ def _get_default_directory():
|
|||
)
|
||||
|
||||
|
||||
class Store(object):
|
||||
class Store:
|
||||
get_default_directory = staticmethod(_get_default_directory)
|
||||
|
||||
def __init__(self, directory=None):
|
||||
def __init__(self, directory: Optional[str] = None) -> None:
|
||||
self.directory = directory or Store.get_default_directory()
|
||||
self.db_path = os.path.join(self.directory, 'db.db')
|
||||
|
||||
if not os.path.exists(self.directory):
|
||||
mkdirp(self.directory)
|
||||
with io.open(os.path.join(self.directory, 'README'), 'w') as f:
|
||||
with open(os.path.join(self.directory, 'README'), 'w') as f:
|
||||
f.write(
|
||||
'This directory is maintained by the pre-commit project.\n'
|
||||
'Learn more: https://github.com/pre-commit/pre-commit\n',
|
||||
|
|
@ -69,21 +72,24 @@ class Store(object):
|
|||
' PRIMARY KEY (repo, ref)'
|
||||
');',
|
||||
)
|
||||
self._create_config_table_if_not_exists(db)
|
||||
self._create_config_table(db)
|
||||
|
||||
# Atomic file move
|
||||
os.rename(tmpfile, self.db_path)
|
||||
|
||||
@contextlib.contextmanager
|
||||
def exclusive_lock(self):
|
||||
def blocked_cb(): # pragma: no cover (tests are single-process)
|
||||
def exclusive_lock(self) -> Generator[None, None, None]:
|
||||
def blocked_cb() -> None: # pragma: no cover (tests are in-process)
|
||||
logger.info('Locking pre-commit directory')
|
||||
|
||||
with file_lock.lock(os.path.join(self.directory, '.lock'), blocked_cb):
|
||||
yield
|
||||
|
||||
@contextlib.contextmanager
|
||||
def connect(self, db_path=None):
|
||||
def connect(
|
||||
self,
|
||||
db_path: Optional[str] = None,
|
||||
) -> Generator[sqlite3.Connection, None, None]:
|
||||
db_path = db_path or self.db_path
|
||||
# sqlite doesn't close its fd with its contextmanager >.<
|
||||
# contextlib.closing fixes this.
|
||||
|
|
@ -94,24 +100,29 @@ class Store(object):
|
|||
yield db
|
||||
|
||||
@classmethod
|
||||
def db_repo_name(cls, repo, deps):
|
||||
def db_repo_name(cls, repo: str, deps: Sequence[str]) -> str:
|
||||
if deps:
|
||||
return '{}:{}'.format(repo, ','.join(sorted(deps)))
|
||||
else:
|
||||
return repo
|
||||
|
||||
def _new_repo(self, repo, ref, deps, make_strategy):
|
||||
def _new_repo(
|
||||
self,
|
||||
repo: str,
|
||||
ref: str,
|
||||
deps: Sequence[str],
|
||||
make_strategy: Callable[[str], None],
|
||||
) -> str:
|
||||
repo = self.db_repo_name(repo, deps)
|
||||
|
||||
def _get_result():
|
||||
def _get_result() -> Optional[str]:
|
||||
# Check if we already exist
|
||||
with self.connect() as db:
|
||||
result = db.execute(
|
||||
'SELECT path FROM repos WHERE repo = ? AND ref = ?',
|
||||
(repo, ref),
|
||||
).fetchone()
|
||||
if result:
|
||||
return result[0]
|
||||
return result[0] if result else None
|
||||
|
||||
result = _get_result()
|
||||
if result:
|
||||
|
|
@ -122,7 +133,7 @@ class Store(object):
|
|||
if result: # pragma: no cover (race)
|
||||
return result
|
||||
|
||||
logger.info('Initializing environment for {}.'.format(repo))
|
||||
logger.info(f'Initializing environment for {repo}.')
|
||||
|
||||
directory = tempfile.mkdtemp(prefix='repo', dir=self.directory)
|
||||
with clean_path_on_failure(directory):
|
||||
|
|
@ -136,14 +147,14 @@ class Store(object):
|
|||
)
|
||||
return directory
|
||||
|
||||
def _complete_clone(self, ref, git_cmd):
|
||||
def _complete_clone(self, ref: str, git_cmd: Callable[..., None]) -> None:
|
||||
"""Perform a complete clone of a repository and its submodules """
|
||||
|
||||
git_cmd('fetch', 'origin', '--tags')
|
||||
git_cmd('checkout', ref)
|
||||
git_cmd('submodule', 'update', '--init', '--recursive')
|
||||
|
||||
def _shallow_clone(self, ref, git_cmd):
|
||||
def _shallow_clone(self, ref: str, git_cmd: Callable[..., None]) -> None:
|
||||
"""Perform a shallow clone of a repository and its submodules """
|
||||
|
||||
git_config = 'protocol.version=2'
|
||||
|
|
@ -154,14 +165,14 @@ class Store(object):
|
|||
'--depth=1',
|
||||
)
|
||||
|
||||
def clone(self, repo, ref, deps=()):
|
||||
def clone(self, repo: str, ref: str, deps: Sequence[str] = ()) -> str:
|
||||
"""Clone the given url and checkout the specific ref."""
|
||||
|
||||
def clone_strategy(directory):
|
||||
def clone_strategy(directory: str) -> None:
|
||||
git.init_repo(directory, repo)
|
||||
env = git.no_git_env()
|
||||
|
||||
def _git_cmd(*args):
|
||||
def _git_cmd(*args: str) -> None:
|
||||
cmd_output_b('git', *args, cwd=directory, env=env)
|
||||
|
||||
try:
|
||||
|
|
@ -176,17 +187,17 @@ class Store(object):
|
|||
'pre_commit_dummy_package.gemspec', 'setup.py', 'environment.yml',
|
||||
)
|
||||
|
||||
def make_local(self, deps):
|
||||
def make_local_strategy(directory):
|
||||
def make_local(self, deps: Sequence[str]) -> str:
|
||||
def make_local_strategy(directory: str) -> None:
|
||||
for resource in self.LOCAL_RESOURCES:
|
||||
contents = resource_text('empty_template_{}'.format(resource))
|
||||
with io.open(os.path.join(directory, resource), 'w') as f:
|
||||
contents = resource_text(f'empty_template_{resource}')
|
||||
with open(os.path.join(directory, resource), 'w') as f:
|
||||
f.write(contents)
|
||||
|
||||
env = git.no_git_env()
|
||||
|
||||
# initialize the git repository so it looks more like cloned repos
|
||||
def _git_cmd(*args):
|
||||
def _git_cmd(*args: str) -> None:
|
||||
cmd_output_b('git', *args, cwd=directory, env=env)
|
||||
|
||||
git.init_repo(directory, '<<unknown>>')
|
||||
|
|
@ -197,7 +208,7 @@ class Store(object):
|
|||
'local', C.LOCAL_REPO_VERSION, deps, make_local_strategy,
|
||||
)
|
||||
|
||||
def _create_config_table_if_not_exists(self, db):
|
||||
def _create_config_table(self, db: sqlite3.Connection) -> None:
|
||||
db.executescript(
|
||||
'CREATE TABLE IF NOT EXISTS configs ('
|
||||
' path TEXT NOT NULL,'
|
||||
|
|
@ -205,32 +216,32 @@ class Store(object):
|
|||
');',
|
||||
)
|
||||
|
||||
def mark_config_used(self, path):
|
||||
def mark_config_used(self, path: str) -> None:
|
||||
path = os.path.realpath(path)
|
||||
# don't insert config files that do not exist
|
||||
if not os.path.exists(path):
|
||||
return
|
||||
with self.connect() as db:
|
||||
# TODO: eventually remove this and only create in _create
|
||||
self._create_config_table_if_not_exists(db)
|
||||
self._create_config_table(db)
|
||||
db.execute('INSERT OR IGNORE INTO configs VALUES (?)', (path,))
|
||||
|
||||
def select_all_configs(self):
|
||||
def select_all_configs(self) -> List[str]:
|
||||
with self.connect() as db:
|
||||
self._create_config_table_if_not_exists(db)
|
||||
self._create_config_table(db)
|
||||
rows = db.execute('SELECT path FROM configs').fetchall()
|
||||
return [path for path, in rows]
|
||||
|
||||
def delete_configs(self, configs):
|
||||
def delete_configs(self, configs: List[str]) -> None:
|
||||
with self.connect() as db:
|
||||
rows = [(path,) for path in configs]
|
||||
db.executemany('DELETE FROM configs WHERE path = ?', rows)
|
||||
|
||||
def select_all_repos(self):
|
||||
def select_all_repos(self) -> List[Tuple[str, str, str]]:
|
||||
with self.connect() as db:
|
||||
return db.execute('SELECT repo, ref, path from repos').fetchall()
|
||||
|
||||
def delete_repo(self, db_repo_name, ref, path):
|
||||
def delete_repo(self, db_repo_name: str, ref: str, path: str) -> None:
|
||||
with self.connect() as db:
|
||||
db.execute(
|
||||
'DELETE FROM repos WHERE repo = ? and ref = ?',
|
||||
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import contextlib
|
||||
import errno
|
||||
import os.path
|
||||
|
|
@ -8,8 +6,16 @@ import stat
|
|||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
import six
|
||||
from types import TracebackType
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
from typing import Dict
|
||||
from typing import Generator
|
||||
from typing import IO
|
||||
from typing import Optional
|
||||
from typing import Tuple
|
||||
from typing import Type
|
||||
from typing import Union
|
||||
|
||||
from pre_commit import five
|
||||
from pre_commit import parse_shebang
|
||||
|
|
@ -21,8 +27,10 @@ else: # pragma: no cover (<PY37)
|
|||
from importlib_resources import open_binary
|
||||
from importlib_resources import read_text
|
||||
|
||||
EnvironT = Union[Dict[str, str], 'os._Environ']
|
||||
|
||||
def mkdirp(path):
|
||||
|
||||
def mkdirp(path: str) -> None:
|
||||
try:
|
||||
os.makedirs(path)
|
||||
except OSError:
|
||||
|
|
@ -31,7 +39,7 @@ def mkdirp(path):
|
|||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def clean_path_on_failure(path):
|
||||
def clean_path_on_failure(path: str) -> Generator[None, None, None]:
|
||||
"""Cleans up the directory on an exceptional failure."""
|
||||
try:
|
||||
yield
|
||||
|
|
@ -42,12 +50,12 @@ def clean_path_on_failure(path):
|
|||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def noop_context():
|
||||
def noop_context() -> Generator[None, None, None]:
|
||||
yield
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def tmpdir():
|
||||
def tmpdir() -> Generator[str, None, None]:
|
||||
"""Contextmanager to create a temporary directory. It will be cleaned up
|
||||
afterwards.
|
||||
"""
|
||||
|
|
@ -58,15 +66,15 @@ def tmpdir():
|
|||
rmtree(tempdir)
|
||||
|
||||
|
||||
def resource_bytesio(filename):
|
||||
def resource_bytesio(filename: str) -> IO[bytes]:
|
||||
return open_binary('pre_commit.resources', filename)
|
||||
|
||||
|
||||
def resource_text(filename):
|
||||
def resource_text(filename: str) -> str:
|
||||
return read_text('pre_commit.resources', filename)
|
||||
|
||||
|
||||
def make_executable(filename):
|
||||
def make_executable(filename: str) -> None:
|
||||
original_mode = os.stat(filename).st_mode
|
||||
os.chmod(
|
||||
filename, original_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH,
|
||||
|
|
@ -74,18 +82,23 @@ def make_executable(filename):
|
|||
|
||||
|
||||
class CalledProcessError(RuntimeError):
|
||||
def __init__(self, returncode, cmd, expected_returncode, stdout, stderr):
|
||||
super(CalledProcessError, self).__init__(
|
||||
returncode, cmd, expected_returncode, stdout, stderr,
|
||||
)
|
||||
def __init__(
|
||||
self,
|
||||
returncode: int,
|
||||
cmd: Tuple[str, ...],
|
||||
expected_returncode: int,
|
||||
stdout: bytes,
|
||||
stderr: Optional[bytes],
|
||||
) -> None:
|
||||
super().__init__(returncode, cmd, expected_returncode, stdout, stderr)
|
||||
self.returncode = returncode
|
||||
self.cmd = cmd
|
||||
self.expected_returncode = expected_returncode
|
||||
self.stdout = stdout
|
||||
self.stderr = stderr
|
||||
|
||||
def to_bytes(self):
|
||||
def _indent_or_none(part):
|
||||
def __bytes__(self) -> bytes:
|
||||
def _indent_or_none(part: Optional[bytes]) -> bytes:
|
||||
if part:
|
||||
return b'\n ' + part.replace(b'\n', b'\n ')
|
||||
else:
|
||||
|
|
@ -101,18 +114,14 @@ class CalledProcessError(RuntimeError):
|
|||
b'stderr:', _indent_or_none(self.stderr),
|
||||
))
|
||||
|
||||
def to_text(self):
|
||||
return self.to_bytes().decode('UTF-8')
|
||||
|
||||
if six.PY2: # pragma: no cover (py2)
|
||||
__str__ = to_bytes
|
||||
__unicode__ = to_text
|
||||
else: # pragma: no cover (py3)
|
||||
__bytes__ = to_bytes
|
||||
__str__ = to_text
|
||||
def __str__(self) -> str:
|
||||
return self.__bytes__().decode('UTF-8')
|
||||
|
||||
|
||||
def _cmd_kwargs(*cmd, **kwargs):
|
||||
def _cmd_kwargs(
|
||||
*cmd: str,
|
||||
**kwargs: Any,
|
||||
) -> Tuple[Tuple[str, ...], Dict[str, Any]]:
|
||||
# py2/py3 on windows are more strict about the types here
|
||||
cmd = tuple(five.n(arg) for arg in cmd)
|
||||
kwargs['env'] = {
|
||||
|
|
@ -124,7 +133,10 @@ def _cmd_kwargs(*cmd, **kwargs):
|
|||
return cmd, kwargs
|
||||
|
||||
|
||||
def cmd_output_b(*cmd, **kwargs):
|
||||
def cmd_output_b(
|
||||
*cmd: str,
|
||||
**kwargs: Any,
|
||||
) -> Tuple[int, bytes, Optional[bytes]]:
|
||||
retcode = kwargs.pop('retcode', 0)
|
||||
cmd, kwargs = _cmd_kwargs(*cmd, **kwargs)
|
||||
|
||||
|
|
@ -143,7 +155,7 @@ def cmd_output_b(*cmd, **kwargs):
|
|||
return returncode, stdout_b, stderr_b
|
||||
|
||||
|
||||
def cmd_output(*cmd, **kwargs):
|
||||
def cmd_output(*cmd: str, **kwargs: Any) -> Tuple[int, str, Optional[str]]:
|
||||
returncode, stdout_b, stderr_b = cmd_output_b(*cmd, **kwargs)
|
||||
stdout = stdout_b.decode('UTF-8') if stdout_b is not None else None
|
||||
stderr = stderr_b.decode('UTF-8') if stderr_b is not None else None
|
||||
|
|
@ -154,35 +166,45 @@ if os.name != 'nt': # pragma: windows no cover
|
|||
from os import openpty
|
||||
import termios
|
||||
|
||||
class Pty(object):
|
||||
def __init__(self):
|
||||
self.r = self.w = None
|
||||
class Pty:
|
||||
def __init__(self) -> None:
|
||||
self.r: Optional[int] = None
|
||||
self.w: Optional[int] = None
|
||||
|
||||
def __enter__(self):
|
||||
def __enter__(self) -> 'Pty':
|
||||
self.r, self.w = openpty()
|
||||
|
||||
# tty flags normally change \n to \r\n
|
||||
attrs = termios.tcgetattr(self.r)
|
||||
assert isinstance(attrs[1], int)
|
||||
attrs[1] &= ~(termios.ONLCR | termios.OPOST)
|
||||
termios.tcsetattr(self.r, termios.TCSANOW, attrs)
|
||||
|
||||
return self
|
||||
|
||||
def close_w(self):
|
||||
def close_w(self) -> None:
|
||||
if self.w is not None:
|
||||
os.close(self.w)
|
||||
self.w = None
|
||||
|
||||
def close_r(self):
|
||||
def close_r(self) -> None:
|
||||
assert self.r is not None
|
||||
os.close(self.r)
|
||||
self.r = None
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
def __exit__(
|
||||
self,
|
||||
exc_type: Optional[Type[BaseException]],
|
||||
exc_value: Optional[BaseException],
|
||||
traceback: Optional[TracebackType],
|
||||
) -> None:
|
||||
self.close_w()
|
||||
self.close_r()
|
||||
|
||||
def cmd_output_p(*cmd, **kwargs):
|
||||
def cmd_output_p(
|
||||
*cmd: str,
|
||||
**kwargs: Any,
|
||||
) -> Tuple[int, bytes, Optional[bytes]]:
|
||||
assert kwargs.pop('retcode') is None
|
||||
assert kwargs['stderr'] == subprocess.STDOUT, kwargs['stderr']
|
||||
cmd, kwargs = _cmd_kwargs(*cmd, **kwargs)
|
||||
|
|
@ -193,6 +215,7 @@ if os.name != 'nt': # pragma: windows no cover
|
|||
return e.to_output()
|
||||
|
||||
with open(os.devnull) as devnull, Pty() as pty:
|
||||
assert pty.r is not None
|
||||
kwargs.update({'stdin': devnull, 'stdout': pty.w, 'stderr': pty.w})
|
||||
proc = subprocess.Popen(cmd, **kwargs)
|
||||
pty.close_w()
|
||||
|
|
@ -216,9 +239,13 @@ else: # pragma: no cover
|
|||
cmd_output_p = cmd_output_b
|
||||
|
||||
|
||||
def rmtree(path):
|
||||
def rmtree(path: str) -> None:
|
||||
"""On windows, rmtree fails for readonly dirs."""
|
||||
def handle_remove_readonly(func, path, exc):
|
||||
def handle_remove_readonly(
|
||||
func: Callable[..., Any],
|
||||
path: str,
|
||||
exc: Tuple[Type[OSError], OSError, TracebackType],
|
||||
) -> None:
|
||||
excvalue = exc[1]
|
||||
if (
|
||||
func in (os.rmdir, os.remove, os.unlink) and
|
||||
|
|
@ -232,6 +259,6 @@ def rmtree(path):
|
|||
shutil.rmtree(path, ignore_errors=False, onerror=handle_remove_readonly)
|
||||
|
||||
|
||||
def parse_version(s):
|
||||
def parse_version(s: str) -> Tuple[int, ...]:
|
||||
"""poor man's version comparison"""
|
||||
return tuple(int(p) for p in s.split('.'))
|
||||
|
|
|
|||
|
|
@ -1,22 +1,29 @@
|
|||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import concurrent.futures
|
||||
import contextlib
|
||||
import math
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
import six
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
from typing import Generator
|
||||
from typing import Iterable
|
||||
from typing import List
|
||||
from typing import Optional
|
||||
from typing import Sequence
|
||||
from typing import Tuple
|
||||
from typing import TypeVar
|
||||
|
||||
from pre_commit import parse_shebang
|
||||
from pre_commit.util import cmd_output_b
|
||||
from pre_commit.util import cmd_output_p
|
||||
from pre_commit.util import EnvironT
|
||||
|
||||
TArg = TypeVar('TArg')
|
||||
TRet = TypeVar('TRet')
|
||||
|
||||
|
||||
def _environ_size(_env=None):
|
||||
def _environ_size(_env: Optional[EnvironT] = None) -> int:
|
||||
environ = _env if _env is not None else getattr(os, 'environb', os.environ)
|
||||
size = 8 * len(environ) # number of pointers in `envp`
|
||||
for k, v in environ.items():
|
||||
|
|
@ -24,9 +31,9 @@ def _environ_size(_env=None):
|
|||
return size
|
||||
|
||||
|
||||
def _get_platform_max_length(): # pragma: no cover (platform specific)
|
||||
def _get_platform_max_length() -> int: # pragma: no cover (platform specific)
|
||||
if os.name == 'posix':
|
||||
maximum = os.sysconf(str('SC_ARG_MAX')) - 2048 - _environ_size()
|
||||
maximum = os.sysconf('SC_ARG_MAX') - 2048 - _environ_size()
|
||||
maximum = max(min(maximum, 2 ** 17), 2 ** 12)
|
||||
return maximum
|
||||
elif os.name == 'nt':
|
||||
|
|
@ -36,17 +43,14 @@ def _get_platform_max_length(): # pragma: no cover (platform specific)
|
|||
return 2 ** 12
|
||||
|
||||
|
||||
def _command_length(*cmd):
|
||||
def _command_length(*cmd: str) -> int:
|
||||
full_cmd = ' '.join(cmd)
|
||||
|
||||
# win32 uses the amount of characters, more details at:
|
||||
# https://github.com/pre-commit/pre-commit/pull/839
|
||||
if sys.platform == 'win32':
|
||||
# the python2.x apis require bytes, we encode as UTF-8
|
||||
if six.PY2:
|
||||
return len(full_cmd.encode('utf-8'))
|
||||
else:
|
||||
return len(full_cmd.encode('utf-16le')) // 2
|
||||
return len(full_cmd.encode('utf-16le')) // 2
|
||||
else:
|
||||
return len(full_cmd.encode(sys.getfilesystemencoding()))
|
||||
|
||||
|
|
@ -55,7 +59,12 @@ class ArgumentTooLongError(RuntimeError):
|
|||
pass
|
||||
|
||||
|
||||
def partition(cmd, varargs, target_concurrency, _max_length=None):
|
||||
def partition(
|
||||
cmd: Sequence[str],
|
||||
varargs: Sequence[str],
|
||||
target_concurrency: int,
|
||||
_max_length: Optional[int] = None,
|
||||
) -> Tuple[Tuple[str, ...], ...]:
|
||||
_max_length = _max_length or _get_platform_max_length()
|
||||
|
||||
# Generally, we try to partition evenly into at least `target_concurrency`
|
||||
|
|
@ -65,7 +74,7 @@ def partition(cmd, varargs, target_concurrency, _max_length=None):
|
|||
cmd = tuple(cmd)
|
||||
ret = []
|
||||
|
||||
ret_cmd = []
|
||||
ret_cmd: List[str] = []
|
||||
# Reversed so arguments are in order
|
||||
varargs = list(reversed(varargs))
|
||||
|
||||
|
|
@ -95,7 +104,10 @@ def partition(cmd, varargs, target_concurrency, _max_length=None):
|
|||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _thread_mapper(maxsize):
|
||||
def _thread_mapper(maxsize: int) -> Generator[
|
||||
Callable[[Callable[[TArg], TRet], Iterable[TArg]], Iterable[TRet]],
|
||||
None, None,
|
||||
]:
|
||||
if maxsize == 1:
|
||||
yield map
|
||||
else:
|
||||
|
|
@ -103,7 +115,11 @@ def _thread_mapper(maxsize):
|
|||
yield ex.map
|
||||
|
||||
|
||||
def xargs(cmd, varargs, **kwargs):
|
||||
def xargs(
|
||||
cmd: Tuple[str, ...],
|
||||
varargs: Sequence[str],
|
||||
**kwargs: Any,
|
||||
) -> Tuple[int, bytes]:
|
||||
"""A simplified implementation of xargs.
|
||||
|
||||
color: Make a pty if on a platform that supports it
|
||||
|
|
@ -123,9 +139,11 @@ def xargs(cmd, varargs, **kwargs):
|
|||
|
||||
partitions = partition(cmd, varargs, target_concurrency, max_length)
|
||||
|
||||
def run_cmd_partition(run_cmd):
|
||||
def run_cmd_partition(
|
||||
run_cmd: Tuple[str, ...],
|
||||
) -> Tuple[int, bytes, Optional[bytes]]:
|
||||
return cmd_fn(
|
||||
*run_cmd, retcode=None, stderr=subprocess.STDOUT, **kwargs
|
||||
*run_cmd, retcode=None, stderr=subprocess.STDOUT, **kwargs,
|
||||
)
|
||||
|
||||
threads = min(len(partitions), target_concurrency)
|
||||
|
|
|
|||
|
|
@@ -1,6 +1,5 @@
-e .

coverage
mock
pytest
pytest-env
21
setup.cfg

@@ -11,10 +11,8 @@ license = MIT
license_file = LICENSE
classifiers =
    License :: OSI Approved :: MIT License
    Programming Language :: Python :: 2
    Programming Language :: Python :: 2.7
    Programming Language :: Python :: 3
    Programming Language :: Python :: 3.5
    Programming Language :: Python :: 3 :: Only
    Programming Language :: Python :: 3.6
    Programming Language :: Python :: 3.7
    Programming Language :: Python :: 3.8

@@ -29,13 +27,11 @@ install_requires =
    identify>=1.0.0
    nodeenv>=0.11.1
    pyyaml
    six
    toml
    virtualenv>=15.2
    futures;python_version<"3.2"
    importlib-metadata;python_version<"3.8"
    importlib-resources;python_version<"3.7"
python_requires = >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*
python_requires = >=3.6

[options.entry_points]
console_scripts =

@@ -56,3 +52,16 @@ exclude =

[bdist_wheel]
universal = True

[mypy]
check_untyped_defs = true
disallow_any_generics = true
disallow_incomplete_defs = true
disallow_untyped_defs = true
no_implicit_optional = true

[mypy-testing.*]
disallow_untyped_defs = false

[mypy-tests.*]
disallow_untyped_defs = false
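The new `[mypy]` section enables strict-ish checking for the package while the `[mypy-testing.*]` and `[mypy-tests.*]` overrides relax it for test code; mypy reads these sections from `setup.cfg` automatically. A tiny sketch of what `disallow_untyped_defs` rejects versus accepts (hypothetical module, not part of the repo):

def untyped(x):  # flagged: function is missing type annotations
    return x * 2


def typed(x: int) -> int:  # accepted: parameters and return are annotated
    return x * 2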
@ -1,5 +1,3 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import collections
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,8 +1,4 @@
|
|||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import contextlib
|
||||
import io
|
||||
import os.path
|
||||
import shutil
|
||||
|
||||
|
|
@ -58,10 +54,10 @@ def modify_manifest(path, commit=True):
|
|||
.pre-commit-hooks.yaml.
|
||||
"""
|
||||
manifest_path = os.path.join(path, C.MANIFEST_FILE)
|
||||
with io.open(manifest_path) as f:
|
||||
with open(manifest_path) as f:
|
||||
manifest = ordered_load(f.read())
|
||||
yield manifest
|
||||
with io.open(manifest_path, 'w') as manifest_file:
|
||||
with open(manifest_path, 'w') as manifest_file:
|
||||
manifest_file.write(ordered_dump(manifest, **C.YAML_DUMP_KWARGS))
|
||||
if commit:
|
||||
git_commit(msg=modify_manifest.__name__, cwd=path)
|
||||
|
|
@ -73,10 +69,10 @@ def modify_config(path='.', commit=True):
|
|||
.pre-commit-config.yaml
|
||||
"""
|
||||
config_path = os.path.join(path, C.CONFIG_FILE)
|
||||
with io.open(config_path) as f:
|
||||
with open(config_path) as f:
|
||||
config = ordered_load(f.read())
|
||||
yield config
|
||||
with io.open(config_path, 'w', encoding='UTF-8') as config_file:
|
||||
with open(config_path, 'w', encoding='UTF-8') as config_file:
|
||||
config_file.write(ordered_dump(config, **C.YAML_DUMP_KWARGS))
|
||||
if commit:
|
||||
git_commit(msg=modify_config.__name__, cwd=path)
|
||||
|
|
@ -101,7 +97,7 @@ def sample_meta_config():
|
|||
def make_config_from_repo(repo_path, rev=None, hooks=None, check=True):
|
||||
manifest = load_manifest(os.path.join(repo_path, C.MANIFEST_FILE))
|
||||
config = {
|
||||
'repo': 'file://{}'.format(repo_path),
|
||||
'repo': f'file://{repo_path}',
|
||||
'rev': rev or git.head_rev(repo_path),
|
||||
'hooks': hooks or [{'id': hook['id']} for hook in manifest],
|
||||
}
|
||||
|
|
@ -117,7 +113,7 @@ def make_config_from_repo(repo_path, rev=None, hooks=None, check=True):
|
|||
|
||||
def read_config(directory, config_file=C.CONFIG_FILE):
|
||||
config_path = os.path.join(directory, config_file)
|
||||
with io.open(config_path) as f:
|
||||
with open(config_path) as f:
|
||||
config = ordered_load(f.read())
|
||||
return config
|
||||
|
||||
|
|
@ -126,7 +122,7 @@ def write_config(directory, config, config_file=C.CONFIG_FILE):
|
|||
if type(config) is not list and 'repos' not in config:
|
||||
assert isinstance(config, dict), config
|
||||
config = {'repos': [config]}
|
||||
with io.open(os.path.join(directory, config_file), 'w') as outfile:
|
||||
with open(os.path.join(directory, config_file), 'w') as outfile:
|
||||
outfile.write(ordered_dump(config, **C.YAML_DUMP_KWARGS))
|
||||
|
||||
|
||||
|
|
|
|||
27
testing/gen-languages-all
Executable file

@@ -0,0 +1,27 @@
#!/usr/bin/env python3
import sys

LANGUAGES = [
    'conda', 'docker', 'docker_image', 'fail', 'golang', 'node', 'pygrep',
    'python', 'python_venv', 'ruby', 'rust', 'script', 'swift', 'system',
]
FIELDS = [
    'ENVIRONMENT_DIR', 'get_default_version', 'healthy', 'install_environment',
    'run_hook',
]


def main() -> int:
    print(f'    # BEGIN GENERATED ({sys.argv[0]})')
    for lang in LANGUAGES:
        parts = [f'    {lang!r}: Language(name={lang!r}']
        for k in FIELDS:
            parts.append(f', {k}={lang}.{k}')
        parts.append('),  # noqa: E501')
        print(''.join(parts))
    print('    # END GENERATED')
    return 0


if __name__ == '__main__':
    exit(main())
@@ -1,14 +1,14 @@
#!/usr/bin/env bash
# This is a script used in travis-ci to install swift
# This is a script used in CI to install swift
set -euxo pipefail

. /etc/lsb-release
if [ "$DISTRIB_CODENAME" = "trusty" ]; then
    SWIFT_URL='https://swift.org/builds/swift-4.0.3-release/ubuntu1404/swift-4.0.3-RELEASE/swift-4.0.3-RELEASE-ubuntu14.04.tar.gz'
    SWIFT_HASH="dddb40ec4956e4f6a3f4532d859691d5d1ba8822f6e8b4ec6c452172dbede5ae"
if [ "$DISTRIB_CODENAME" = "bionic" ]; then
    SWIFT_URL='https://swift.org/builds/swift-5.1.3-release/ubuntu1804/swift-5.1.3-RELEASE/swift-5.1.3-RELEASE-ubuntu18.04.tar.gz'
    SWIFT_HASH='ac82ccd773fe3d586fc340814e31e120da1ff695c6a712f6634e9cc720769610'
else
    SWIFT_URL='https://swift.org/builds/swift-4.0.3-release/ubuntu1604/swift-4.0.3-RELEASE/swift-4.0.3-RELEASE-ubuntu16.04.tar.gz'
    SWIFT_HASH="9adf64cabc7c02ea2d08f150b449b05e46bd42d6e542bf742b3674f5c37f0dbf"
    echo "unknown dist: ${DISTRIB_CODENAME}" 1>&2
    exit 1
fi

check() {
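The script above pins each Swift tarball to a SHA-256; a minimal Python sketch of the same verify-before-unpack idea (hypothetical paths, not the CI script itself):

import hashlib

EXPECTED = 'ac82ccd773fe3d586fc340814e31e120da1ff695c6a712f6634e9cc720769610'


def sha256_matches(path: str, expected: str) -> bool:
    h = hashlib.sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(1 << 20), b''):
            h.update(chunk)
    return h.hexdigest() == expected


# only proceed with extraction when the downloaded archive checks out:
# assert sha256_matches('swift.tar.gz', EXPECTED)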
@ -1,5 +1,3 @@
|
|||
from __future__ import print_function
|
||||
|
||||
import sys
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
from __future__ import print_function
|
||||
|
||||
import sys
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
from __future__ import print_function
|
||||
|
||||
import sys
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ import sys
|
|||
def main():
|
||||
for i in range(6):
|
||||
f = sys.stdout if i % 2 == 0 else sys.stderr
|
||||
f.write('{}\n'.format(i))
|
||||
f.write(f'{i}\n')
|
||||
f.flush()
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,7 @@
|
|||
// swift-tools-version:5.0
|
||||
import PackageDescription
|
||||
|
||||
let package = Package(
|
||||
name: "swift_hooks_repo"
|
||||
name: "swift_hooks_repo",
|
||||
targets: [.target(name: "swift_hooks_repo")]
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import contextlib
|
||||
import os.path
|
||||
import subprocess
|
||||
|
|
@ -50,7 +48,7 @@ def broken_deep_listdir(): # pragma: no cover (platform specific)
|
|||
if sys.platform != 'win32':
|
||||
return False
|
||||
try:
|
||||
os.listdir(str('\\\\?\\') + os.path.abspath(str('.')))
|
||||
os.listdir('\\\\?\\' + os.path.abspath('.'))
|
||||
except OSError:
|
||||
return True
|
||||
try:
|
||||
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
import cfgv
|
||||
|
|
|
|||
|
|
@ -1,8 +1,6 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import sys
|
||||
from unittest import mock
|
||||
|
||||
import mock
|
||||
import pytest
|
||||
|
||||
from pre_commit import envcontext
|
||||
|
|
@ -14,7 +12,7 @@ from pre_commit.color import use_color
|
|||
|
||||
@pytest.mark.parametrize(
|
||||
('in_text', 'in_color', 'in_use_color', 'expected'), (
|
||||
('foo', GREEN, True, '{}foo\033[0m'.format(GREEN)),
|
||||
('foo', GREEN, True, f'{GREEN}foo\033[0m'),
|
||||
('foo', GREEN, False, 'foo'),
|
||||
),
|
||||
)
|
||||
|
|
@ -39,21 +37,21 @@ def test_use_color_no_tty():
|
|||
def test_use_color_tty_with_color_support():
|
||||
with mock.patch.object(sys.stdout, 'isatty', return_value=True):
|
||||
with mock.patch('pre_commit.color.terminal_supports_color', True):
|
||||
with envcontext.envcontext([('TERM', envcontext.UNSET)]):
|
||||
with envcontext.envcontext((('TERM', envcontext.UNSET),)):
|
||||
assert use_color('auto') is True
|
||||
|
||||
|
||||
def test_use_color_tty_without_color_support():
|
||||
with mock.patch.object(sys.stdout, 'isatty', return_value=True):
|
||||
with mock.patch('pre_commit.color.terminal_supports_color', False):
|
||||
with envcontext.envcontext([('TERM', envcontext.UNSET)]):
|
||||
with envcontext.envcontext((('TERM', envcontext.UNSET),)):
|
||||
assert use_color('auto') is False
|
||||
|
||||
|
||||
def test_use_color_dumb_term():
|
||||
with mock.patch.object(sys.stdout, 'isatty', return_value=True):
|
||||
with mock.patch('pre_commit.color.terminal_supports_color', True):
|
||||
with envcontext.envcontext([('TERM', 'dumb')]):
|
||||
with envcontext.envcontext((('TERM', 'dumb'),)):
|
||||
assert use_color('auto') is False
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import pipes
|
||||
|
||||
import pytest
|
||||
|
|
@ -213,7 +211,7 @@ def test_autoupdate_out_of_date_repo_with_correct_repo_name(
|
|||
|
||||
with open(C.CONFIG_FILE) as f:
|
||||
before = f.read()
|
||||
repo_name = 'file://{}'.format(out_of_date.path)
|
||||
repo_name = f'file://{out_of_date.path}'
|
||||
ret = autoupdate(
|
||||
C.CONFIG_FILE, store, freeze=False, tags_only=False,
|
||||
repos=(repo_name,),
|
||||
|
|
@ -312,7 +310,7 @@ def test_autoupdate_freeze(tagged, in_tmpdir, store):
|
|||
|
||||
assert autoupdate(C.CONFIG_FILE, store, freeze=True, tags_only=False) == 0
|
||||
with open(C.CONFIG_FILE) as f:
|
||||
expected = 'rev: {} # frozen: v1.2.3'.format(tagged.head_rev)
|
||||
expected = f'rev: {tagged.head_rev} # frozen: v1.2.3'
|
||||
assert expected in f.read()
|
||||
|
||||
# if we un-freeze it should remove the frozen comment
|
||||
|
|
|
|||
|
|
@ -1,8 +1,6 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import os.path
|
||||
from unittest import mock
|
||||
|
||||
import mock
|
||||
import pytest
|
||||
|
||||
from pre_commit.commands.clean import clean
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
import os.path
|
||||
|
||||
import mock
|
||||
from unittest import mock
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit.commands.init_templatedir import init_templatedir
|
||||
|
|
@ -25,7 +24,7 @@ def test_init_templatedir(tmpdir, tempdir_factory, store, cap_out):
|
|||
'[WARNING] maybe `git config --global init.templateDir',
|
||||
)
|
||||
|
||||
with envcontext([('GIT_TEMPLATE_DIR', target)]):
|
||||
with envcontext((('GIT_TEMPLATE_DIR', target),)):
|
||||
path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
|
||||
|
||||
with cwd(path):
|
||||
|
|
@ -53,7 +52,7 @@ def test_init_templatedir_already_set(tmpdir, tempdir_factory, store, cap_out):
|
|||
|
||||
def test_init_templatedir_not_set(tmpdir, store, cap_out):
|
||||
# set HOME to ignore the current `.gitconfig`
|
||||
with envcontext([('HOME', str(tmpdir))]):
|
||||
with envcontext((('HOME', str(tmpdir)),)):
|
||||
with tmpdir.join('tmpl').ensure_dir().as_cwd():
|
||||
# we have not set init.templateDir so this should produce a warning
|
||||
init_templatedir(
|
||||
|
|
|
|||
|
|
@ -1,13 +1,7 @@
|
|||
# -*- coding: UTF-8 -*-
|
||||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import io
|
||||
import os.path
|
||||
import re
|
||||
import sys
|
||||
|
||||
import mock
|
||||
from unittest import mock
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit.commands.install_uninstall import CURRENT_HASH
|
||||
|
|
@ -123,7 +117,7 @@ def _get_commit_output(tempdir_factory, touch_file='foo', **kwargs):
|
|||
fn=cmd_output_mocked_pre_commit_home,
|
||||
retcode=None,
|
||||
tempdir_factory=tempdir_factory,
|
||||
**kwargs
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
|
||||
|
|
@ -203,7 +197,7 @@ def test_commit_am(tempdir_factory, store):
|
|||
open('unstaged', 'w').close()
|
||||
cmd_output('git', 'add', '.')
|
||||
git_commit(cwd=path)
|
||||
with io.open('unstaged', 'w') as foo_file:
|
||||
with open('unstaged', 'w') as foo_file:
|
||||
foo_file.write('Oh hai')
|
||||
|
||||
assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0
|
||||
|
|
@ -314,7 +308,7 @@ EXISTING_COMMIT_RUN = re.compile(
|
|||
|
||||
def _write_legacy_hook(path):
|
||||
mkdirp(os.path.join(path, '.git/hooks'))
|
||||
with io.open(os.path.join(path, '.git/hooks/pre-commit'), 'w') as f:
|
||||
with open(os.path.join(path, '.git/hooks/pre-commit'), 'w') as f:
|
||||
f.write('#!/usr/bin/env bash\necho "legacy hook"\n')
|
||||
make_executable(f.name)
|
||||
|
||||
|
|
@ -377,7 +371,7 @@ def test_failing_existing_hook_returns_1(tempdir_factory, store):
|
|||
with cwd(path):
|
||||
# Write out a failing "old" hook
|
||||
mkdirp(os.path.join(path, '.git/hooks'))
|
||||
with io.open(os.path.join(path, '.git/hooks/pre-commit'), 'w') as f:
|
||||
with open(os.path.join(path, '.git/hooks/pre-commit'), 'w') as f:
|
||||
f.write('#!/usr/bin/env bash\necho "fail!"\nexit 1\n')
|
||||
make_executable(f.name)
|
||||
|
||||
|
|
@ -439,7 +433,7 @@ def test_replace_old_commit_script(tempdir_factory, store):
|
|||
)
|
||||
|
||||
mkdirp(os.path.join(path, '.git/hooks'))
|
||||
with io.open(os.path.join(path, '.git/hooks/pre-commit'), 'w') as f:
|
||||
with open(os.path.join(path, '.git/hooks/pre-commit'), 'w') as f:
|
||||
f.write(new_contents)
|
||||
make_executable(f.name)
|
||||
|
||||
|
|
@ -525,7 +519,7 @@ def _get_push_output(tempdir_factory, opts=()):
|
|||
return cmd_output_mocked_pre_commit_home(
|
||||
'git', 'push', 'origin', 'HEAD:new_branch', *opts,
|
||||
tempdir_factory=tempdir_factory,
|
||||
retcode=None
|
||||
retcode=None,
|
||||
)[:2]
|
||||
|
||||
|
||||
|
|
@ -616,7 +610,7 @@ def test_pre_push_legacy(tempdir_factory, store):
|
|||
cmd_output('git', 'clone', upstream, path)
|
||||
with cwd(path):
|
||||
mkdirp(os.path.join(path, '.git/hooks'))
|
||||
with io.open(os.path.join(path, '.git/hooks/pre-push'), 'w') as f:
|
||||
with open(os.path.join(path, '.git/hooks/pre-push'), 'w') as f:
|
||||
f.write(
|
||||
'#!/usr/bin/env bash\n'
|
||||
'set -eu\n'
|
||||
|
|
@ -665,7 +659,7 @@ def test_commit_msg_integration_passing(
|
|||
def test_commit_msg_legacy(commit_msg_repo, tempdir_factory, store):
|
||||
hook_path = os.path.join(commit_msg_repo, '.git/hooks/commit-msg')
|
||||
mkdirp(os.path.dirname(hook_path))
|
||||
with io.open(hook_path, 'w') as hook_file:
|
||||
with open(hook_path, 'w') as hook_file:
|
||||
hook_file.write(
|
||||
'#!/usr/bin/env bash\n'
|
||||
'set -eu\n'
|
||||
|
|
@ -709,7 +703,7 @@ def test_prepare_commit_msg_integration_passing(
|
|||
commit_msg_path = os.path.join(
|
||||
prepare_commit_msg_repo, '.git/COMMIT_EDITMSG',
|
||||
)
|
||||
with io.open(commit_msg_path) as f:
|
||||
with open(commit_msg_path) as f:
|
||||
assert 'Signed off by: ' in f.read()
|
||||
|
||||
|
||||
|
|
@ -720,7 +714,7 @@ def test_prepare_commit_msg_legacy(
|
|||
prepare_commit_msg_repo, '.git/hooks/prepare-commit-msg',
|
||||
)
|
||||
mkdirp(os.path.dirname(hook_path))
|
||||
with io.open(hook_path, 'w') as hook_file:
|
||||
with open(hook_path, 'w') as hook_file:
|
||||
hook_file.write(
|
||||
'#!/usr/bin/env bash\n'
|
||||
'set -eu\n'
|
||||
|
|
@ -739,7 +733,7 @@ def test_prepare_commit_msg_legacy(
|
|||
commit_msg_path = os.path.join(
|
||||
prepare_commit_msg_repo, '.git/COMMIT_EDITMSG',
|
||||
)
|
||||
with io.open(commit_msg_path) as f:
|
||||
with open(commit_msg_path) as f:
|
||||
assert 'Signed off by: ' in f.read()
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,3 @@
|
|||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import pytest
|
||||
|
||||
import pre_commit.constants as C
|
||||
|
|
|
|||
|
|
@ -1,13 +1,9 @@
|
|||
# -*- coding: UTF-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import io
|
||||
import os.path
|
||||
import pipes
|
||||
import sys
|
||||
import time
|
||||
from unittest import mock
|
||||
|
||||
import mock
|
||||
import pytest
|
||||
|
||||
import pre_commit.constants as C
|
||||
|
|
@ -154,7 +150,7 @@ def test_types_hook_repository(cap_out, store, tempdir_factory):
|
|||
def test_exclude_types_hook_repository(cap_out, store, tempdir_factory):
|
||||
git_path = make_consuming_repo(tempdir_factory, 'exclude_types_repo')
|
||||
with cwd(git_path):
|
||||
with io.open('exe', 'w') as exe:
|
||||
with open('exe', 'w') as exe:
|
||||
exe.write('#!/usr/bin/env python3\n')
|
||||
make_executable('exe')
|
||||
cmd_output('git', 'add', 'exe')
|
||||
|
|
@ -601,8 +597,8 @@ def test_stages(cap_out, store, repo_with_passing_hook):
|
|||
'repo': 'local',
|
||||
'hooks': [
|
||||
{
|
||||
'id': 'do-not-commit-{}'.format(i),
|
||||
'name': 'hook {}'.format(i),
|
||||
'id': f'do-not-commit-{i}',
|
||||
'name': f'hook {i}',
|
||||
'entry': 'DO NOT COMMIT',
|
||||
'language': 'pygrep',
|
||||
'stages': [stage],
|
||||
|
|
@ -636,7 +632,7 @@ def test_stages(cap_out, store, repo_with_passing_hook):
|
|||
|
||||
def test_commit_msg_hook(cap_out, store, commit_msg_repo):
|
||||
filename = '.git/COMMIT_EDITMSG'
|
||||
with io.open(filename, 'w') as f:
|
||||
with open(filename, 'w') as f:
|
||||
f.write('This is the commit message')
|
||||
|
||||
_test_run(
|
||||
|
|
@ -652,7 +648,7 @@ def test_commit_msg_hook(cap_out, store, commit_msg_repo):
|
|||
|
||||
def test_prepare_commit_msg_hook(cap_out, store, prepare_commit_msg_repo):
|
||||
filename = '.git/COMMIT_EDITMSG'
|
||||
with io.open(filename, 'w') as f:
|
||||
with open(filename, 'w') as f:
|
||||
f.write('This is the commit message')
|
||||
|
||||
_test_run(
|
||||
|
|
@ -665,7 +661,7 @@ def test_prepare_commit_msg_hook(cap_out, store, prepare_commit_msg_repo):
|
|||
stage=False,
|
||||
)
|
||||
|
||||
with io.open(filename) as f:
|
||||
with open(filename) as f:
|
||||
assert 'Signed off by: ' in f.read()
|
||||
|
||||
|
||||
|
|
@ -692,7 +688,7 @@ def test_local_hook_passes(cap_out, store, repo_with_passing_hook):
|
|||
}
|
||||
add_config_to_repo(repo_with_passing_hook, config)
|
||||
|
||||
with io.open('dummy.py', 'w') as staged_file:
|
||||
with open('dummy.py', 'w') as staged_file:
|
||||
staged_file.write('"""TODO: something"""\n')
|
||||
cmd_output('git', 'add', 'dummy.py')
|
||||
|
||||
|
|
@ -719,7 +715,7 @@ def test_local_hook_fails(cap_out, store, repo_with_passing_hook):
|
|||
}
|
||||
add_config_to_repo(repo_with_passing_hook, config)
|
||||
|
||||
with io.open('dummy.py', 'w') as staged_file:
|
||||
with open('dummy.py', 'w') as staged_file:
|
||||
staged_file.write('"""TODO: something"""\n')
|
||||
cmd_output('git', 'add', 'dummy.py')
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,3 @@
|
|||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from pre_commit.commands.sample_config import sample_config
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,11 +1,7 @@
|
|||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import os.path
|
||||
import re
|
||||
import time
|
||||
|
||||
import mock
|
||||
from unittest import mock
|
||||
|
||||
from pre_commit import git
|
||||
from pre_commit.commands.try_repo import try_repo
|
||||
|
|
|
|||
|
|
@ -1,14 +1,10 @@
|
|||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import functools
|
||||
import io
|
||||
import logging
|
||||
import os.path
|
||||
from unittest import mock
|
||||
|
||||
import mock
|
||||
import pytest
|
||||
import six
|
||||
|
||||
from pre_commit import output
|
||||
from pre_commit.envcontext import envcontext
|
||||
|
|
@ -36,19 +32,19 @@ def no_warnings(recwarn):
|
|||
' missing __init__' in message
|
||||
):
|
||||
warnings.append(
|
||||
'{}:{} {}'.format(warning.filename, warning.lineno, message),
|
||||
f'{warning.filename}:{warning.lineno} {message}',
|
||||
)
|
||||
assert not warnings
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def tempdir_factory(tmpdir):
|
||||
class TmpdirFactory(object):
|
||||
class TmpdirFactory:
|
||||
def __init__(self):
|
||||
self.tmpdir_count = 0
|
||||
|
||||
def get(self):
|
||||
path = tmpdir.join(six.text_type(self.tmpdir_count)).strpath
|
||||
path = tmpdir.join(str(self.tmpdir_count)).strpath
|
||||
self.tmpdir_count += 1
|
||||
os.mkdir(path)
|
||||
return path
|
||||
|
|
@@ -73,18 +69,18 @@ def in_git_dir(tmpdir):

 def _make_conflict():
     cmd_output('git', 'checkout', 'origin/master', '-b', 'foo')
-    with io.open('conflict_file', 'w') as conflict_file:
+    with open('conflict_file', 'w') as conflict_file:
         conflict_file.write('herp\nderp\n')
     cmd_output('git', 'add', 'conflict_file')
-    with io.open('foo_only_file', 'w') as foo_only_file:
+    with open('foo_only_file', 'w') as foo_only_file:
         foo_only_file.write('foo')
     cmd_output('git', 'add', 'foo_only_file')
     git_commit(msg=_make_conflict.__name__)
     cmd_output('git', 'checkout', 'origin/master', '-b', 'bar')
-    with io.open('conflict_file', 'w') as conflict_file:
+    with open('conflict_file', 'w') as conflict_file:
         conflict_file.write('harp\nddrp\n')
     cmd_output('git', 'add', 'conflict_file')
-    with io.open('bar_only_file', 'w') as bar_only_file:
+    with open('bar_only_file', 'w') as bar_only_file:
         bar_only_file.write('bar')
     cmd_output('git', 'add', 'bar_only_file')
     git_commit(msg=_make_conflict.__name__)
@@ -145,14 +141,14 @@ def prepare_commit_msg_repo(tempdir_factory):
         'hooks': [{
             'id': 'add-signoff',
             'name': 'Add "Signed off by:"',
-            'entry': './{}'.format(script_name),
+            'entry': f'./{script_name}',
             'language': 'script',
             'stages': ['prepare-commit-msg'],
         }],
     }
     write_config(path, config)
     with cwd(path):
-        with io.open(script_name, 'w') as script_file:
+        with open(script_name, 'w') as script_file:
             script_file.write(
                 '#!/usr/bin/env bash\n'
                 'set -eu\n'
@@ -229,7 +225,7 @@ def log_info_mock():
         yield mck


-class FakeStream(object):
+class FakeStream:
     def __init__(self):
         self.data = io.BytesIO()

@@ -240,7 +236,7 @@ class FakeStream(object):
         pass


-class Fixture(object):
+class Fixture:
     def __init__(self, stream):
         self._stream = stream

@@ -278,5 +274,5 @@ def fake_log_handler():
 @pytest.fixture(scope='session', autouse=True)
 def set_git_templatedir(tmpdir_factory):
     tdir = str(tmpdir_factory.mktemp('git_template_dir'))
-    with envcontext([('GIT_TEMPLATE_DIR', tdir)]):
+    with envcontext((('GIT_TEMPLATE_DIR', tdir),)):
         yield

@@ -1,9 +1,6 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 import os
+from unittest import mock

-import mock
 import pytest

 from pre_commit.envcontext import envcontext
@@ -94,16 +91,16 @@ def test_exception_safety():
     class MyError(RuntimeError):
         pass

-    env = {}
+    env = {'hello': 'world'}
     with pytest.raises(MyError):
-        with envcontext([('foo', 'bar')], _env=env):
+        with envcontext((('foo', 'bar'),), _env=env):
             raise MyError()
-    assert env == {}
+    assert env == {'hello': 'world'}


 def test_integration_os_environ():
     with mock.patch.dict(os.environ, {'FOO': 'bar'}, clear=True):
         assert os.environ == {'FOO': 'bar'}
-        with envcontext([('HERP', 'derp')]):
+        with envcontext((('HERP', 'derp'),)):
             assert os.environ == {'FOO': 'bar', 'HERP': 'derp'}
         assert os.environ == {'FOO': 'bar'}

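As a side note, the tuple-of-pairs calling convention exercised in the hunk above can be shown with a tiny standalone sketch; it assumes nothing beyond the envcontext behaviour these tests already demonstrate (the variable is patched inside the block and restored on exit):

    # Illustrative only: mirrors the envcontext((('KEY', 'value'),)) calls above.
    import os

    from pre_commit.envcontext import envcontext

    os.environ.pop('HERP', None)             # start from a known state
    with envcontext((('HERP', 'derp'),)):
        assert os.environ['HERP'] == 'derp'  # patched inside the block
    assert 'HERP' not in os.environ          # restored afterwards
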
@@ -1,13 +1,8 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-import io
 import os.path
 import re
 import sys
+from unittest import mock

-import mock
 import pytest

 from pre_commit import error_handler
@@ -109,7 +104,7 @@ def test_log_and_exit(cap_out, mock_store_dir):
     )

     assert os.path.exists(log_file)
-    with io.open(log_file) as f:
+    with open(log_file) as f:
         logged = f.read()
     expected = (
         r'^### version information\n'
@@ -158,4 +153,4 @@ def test_error_handler_no_tty(tempdir_factory):
     log_file = os.path.join(pre_commit_home, 'pre-commit.log')
     out_lines = out.splitlines()
     assert out_lines[-2] == 'An unexpected error has occurred: ValueError: ☃'
-    assert out_lines[-1] == 'Check the log at {}'.format(log_file)
+    assert out_lines[-1] == f'Check the log at {log_file}'

@@ -1,7 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 import os.path

 import pytest

@@ -1,58 +0,0 @@
-from __future__ import unicode_literals
-
-import functools
-import inspect
-
-import pytest
-import six
-
-from pre_commit.languages.all import all_languages
-from pre_commit.languages.all import languages
-
-
-if six.PY2:  # pragma: no cover
-    ArgSpec = functools.partial(
-        inspect.ArgSpec, varargs=None, keywords=None, defaults=None,
-    )
-    getargspec = inspect.getargspec
-else:  # pragma: no cover
-    ArgSpec = functools.partial(
-        inspect.FullArgSpec, varargs=None, varkw=None, defaults=None,
-        kwonlyargs=[], kwonlydefaults=None, annotations={},
-    )
-    getargspec = inspect.getfullargspec
-
-
-@pytest.mark.parametrize('language', all_languages)
-def test_install_environment_argspec(language):
-    expected_argspec = ArgSpec(
-        args=['prefix', 'version', 'additional_dependencies'],
-    )
-    argspec = getargspec(languages[language].install_environment)
-    assert argspec == expected_argspec
-
-
-@pytest.mark.parametrize('language', all_languages)
-def test_ENVIRONMENT_DIR(language):
-    assert hasattr(languages[language], 'ENVIRONMENT_DIR')
-
-
-@pytest.mark.parametrize('language', all_languages)
-def test_run_hook_argpsec(language):
-    expected_argspec = ArgSpec(args=['hook', 'file_args', 'color'])
-    argspec = getargspec(languages[language].run_hook)
-    assert argspec == expected_argspec
-
-
-@pytest.mark.parametrize('language', all_languages)
-def test_get_default_version_argspec(language):
-    expected_argspec = ArgSpec(args=[])
-    argspec = getargspec(languages[language].get_default_version)
-    assert argspec == expected_argspec
-
-
-@pytest.mark.parametrize('language', all_languages)
-def test_healthy_argspec(language):
-    expected_argspec = ArgSpec(args=['prefix', 'language_version'])
-    argspec = getargspec(languages[language].healthy)
-    assert argspec == expected_argspec

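The file deleted above pinned down each language plugin's function signatures through a py2/py3 ArgSpec shim. For reference, the same technique needs only inspect.getfullargspec on python 3; the following is a self-contained, hypothetical sketch (the run_hook stand-in is illustrative, not pre-commit's implementation):

    import functools
    import inspect

    # FullArgSpec pre-filled with the fields a plain positional signature has.
    ArgSpec = functools.partial(
        inspect.FullArgSpec, varargs=None, varkw=None, defaults=None,
        kwonlyargs=[], kwonlydefaults=None, annotations={},
    )


    def run_hook(hook, file_args, color):  # stand-in for a language's run_hook
        raise NotImplementedError


    def test_run_hook_argspec():
        expected = ArgSpec(args=['hook', 'file_args', 'color'])
        assert inspect.getfullargspec(run_hook) == expected
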
@@ -1,7 +1,4 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-import mock
+from unittest import mock

 from pre_commit.languages import docker
 from pre_commit.util import CalledProcessError
@@ -10,7 +7,7 @@ from pre_commit.util import CalledProcessError
 def test_docker_is_running_process_error():
     with mock.patch(
         'pre_commit.languages.docker.cmd_output_b',
-        side_effect=CalledProcessError(None, None, None, None, None),
+        side_effect=CalledProcessError(1, (), 0, b'', None),
     ):
         assert docker.docker_is_running() is False

@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 import pytest

 from pre_commit.languages.golang import guess_go_dir


@@ -1,11 +1,8 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 import multiprocessing
 import os
 import sys
+from unittest import mock

-import mock
 import pytest

 import pre_commit.constants as C
@@ -20,7 +17,7 @@ def test_basic_get_default_version():


 def test_basic_healthy():
-    assert helpers.basic_healthy(None, None) is True
+    assert helpers.basic_healthy(Prefix('.'), 'default') is True


 def test_failed_setup_command_does_not_unicode_error():
@@ -80,4 +77,6 @@ def test_target_concurrency_cpu_count_not_implemented():


 def test_shuffled_is_deterministic():
-    assert helpers._shuffled(range(10)) == [3, 7, 8, 2, 4, 6, 5, 1, 0, 9]
+    seq = [str(i) for i in range(10)]
+    expected = ['3', '7', '8', '2', '4', '6', '5', '1', '0', '9']
+    assert helpers._shuffled(seq) == expected

@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 import pytest

 from pre_commit.languages import pygrep


@@ -1,10 +1,7 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 import os.path
 import sys
+from unittest import mock

-import mock
 import pytest

 import pre_commit.constants as C
@@ -16,7 +13,7 @@ def test_norm_version_expanduser():
     home = os.path.expanduser('~')
     if os.name == 'nt':  # pragma: no cover (nt)
         path = r'~\python343'
-        expected_path = r'{}\python343'.format(home)
+        expected_path = fr'{home}\python343'
     else:  # pragma: windows no cover
         path = '~/.pyenv/versions/3.4.3/bin/python'
         expected_path = home + '/.pyenv/versions/3.4.3/bin/python'


@@ -1,5 +1,3 @@
-from __future__ import unicode_literals
-
 import os.path
 import pipes

@@ -1,27 +1,21 @@
-from __future__ import unicode_literals
+import logging

 from pre_commit import color
 from pre_commit.logging_handler import LoggingHandler


-class FakeLogRecord(object):
-    def __init__(self, message, levelname, levelno):
-        self.message = message
-        self.levelname = levelname
-        self.levelno = levelno
-
-    def getMessage(self):
-        return self.message
+def _log_record(message, level):
+    return logging.LogRecord('name', level, '', 1, message, {}, None)


 def test_logging_handler_color(cap_out):
     handler = LoggingHandler(True)
-    handler.emit(FakeLogRecord('hi', 'WARNING', 30))
+    handler.emit(_log_record('hi', logging.WARNING))
     ret = cap_out.get()
     assert ret == color.YELLOW + '[WARNING]' + color.NORMAL + ' hi\n'


 def test_logging_handler_no_color(cap_out):
     handler = LoggingHandler(False)
-    handler.emit(FakeLogRecord('hi', 'WARNING', 30))
+    handler.emit(_log_record('hi', logging.WARNING))
     assert cap_out.get() == '[WARNING] hi\n'

@@ -1,10 +1,7 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
+import argparse
 import os.path
+from unittest import mock

-import mock
 import pytest

 import pre_commit.constants as C
@@ -27,25 +24,24 @@ def test_append_replace_default(argv, expected):
     assert parser.parse_args(argv).f == expected


-class Args(object):
-    def __init__(self, **kwargs):
-        kwargs.setdefault('command', 'help')
-        kwargs.setdefault('config', C.CONFIG_FILE)
-        self.__dict__.update(kwargs)
+def _args(**kwargs):
+    kwargs.setdefault('command', 'help')
+    kwargs.setdefault('config', C.CONFIG_FILE)
+    return argparse.Namespace(**kwargs)


 def test_adjust_args_and_chdir_not_in_git_dir(in_tmpdir):
     with pytest.raises(FatalError):
-        main._adjust_args_and_chdir(Args())
+        main._adjust_args_and_chdir(_args())


 def test_adjust_args_and_chdir_in_dot_git_dir(in_git_dir):
     with in_git_dir.join('.git').as_cwd(), pytest.raises(FatalError):
-        main._adjust_args_and_chdir(Args())
+        main._adjust_args_and_chdir(_args())


 def test_adjust_args_and_chdir_noop(in_git_dir):
-    args = Args(command='run', files=['f1', 'f2'])
+    args = _args(command='run', files=['f1', 'f2'])
     main._adjust_args_and_chdir(args)
     assert os.getcwd() == in_git_dir
     assert args.config == C.CONFIG_FILE
@@ -56,7 +52,7 @@ def test_adjust_args_and_chdir_relative_things(in_git_dir):
     in_git_dir.join('foo/cfg.yaml').ensure()
     in_git_dir.join('foo').chdir()

-    args = Args(command='run', files=['f1', 'f2'], config='cfg.yaml')
+    args = _args(command='run', files=['f1', 'f2'], config='cfg.yaml')
     main._adjust_args_and_chdir(args)
     assert os.getcwd() == in_git_dir
     assert args.config == os.path.join('foo', 'cfg.yaml')
@@ -66,7 +62,7 @@ def test_adjust_args_and_chdir_relative_things(in_git_dir):
 def test_adjust_args_and_chdir_non_relative_config(in_git_dir):
     in_git_dir.join('foo').ensure_dir().chdir()

-    args = Args()
+    args = _args()
     main._adjust_args_and_chdir(args)
     assert os.getcwd() == in_git_dir
     assert args.config == C.CONFIG_FILE
@@ -75,7 +71,8 @@ def test_adjust_args_and_chdir_non_relative_config(in_git_dir):
 def test_adjust_args_try_repo_repo_relative(in_git_dir):
     in_git_dir.join('foo').ensure_dir().chdir()

-    args = Args(command='try-repo', repo='../foo', files=[])
+    args = _args(command='try-repo', repo='../foo', files=[])
+    assert args.repo is not None
     assert os.path.exists(args.repo)
     main._adjust_args_and_chdir(args)
     assert os.getcwd() == in_git_dir
@@ -189,4 +186,4 @@ def test_expected_fatal_error_no_git_repo(in_tmpdir, cap_out, mock_store_dir):
         'An error has occurred: FatalError: git failed. '
         'Is it installed, and are you in a Git repository directory?'
     )
-    assert cap_out_lines[-1] == 'Check the log at {}'.format(log_file)
+    assert cap_out_lines[-1] == f'Check the log at {log_file}'

@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 import tarfile

 from pre_commit import git
@@ -46,4 +43,4 @@ def test_main(tmpdir):
     make_archives.main(('--dest', tmpdir.strpath))

     for archive, _, _ in make_archives.REPOS:
-        assert tmpdir.join('{}.tar.gz'.format(archive)).exists()
+        assert tmpdir.join(f'{archive}.tar.gz').exists()

@@ -1,6 +1,5 @@
-from __future__ import unicode_literals
+from unittest import mock

-import mock
 import pytest

 from pre_commit import color
@@ -23,7 +22,7 @@ from pre_commit import output
     ),
 )
 def test_get_hook_message_raises(kwargs):
-    with pytest.raises(ValueError):
+    with pytest.raises(AssertionError):
         output.get_hook_message('start', **kwargs)


@@ -1,9 +1,5 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 import contextlib
 import distutils.spawn
-import io
 import os
 import sys

@@ -15,6 +11,12 @@ from pre_commit.envcontext import Var
 from pre_commit.util import make_executable


+def _echo_exe() -> str:
+    exe = distutils.spawn.find_executable('echo')
+    assert exe is not None
+    return exe
+
+
 def test_file_doesnt_exist():
     assert parse_shebang.parse_filename('herp derp derp') == ()

@@ -31,8 +33,7 @@ def test_find_executable_full_path():


 def test_find_executable_on_path():
-    expected = distutils.spawn.find_executable('echo')
-    assert parse_shebang.find_executable('echo') == expected
+    assert parse_shebang.find_executable('echo') == _echo_exe()


 def test_find_executable_not_found_none():
@@ -42,8 +43,8 @@ def test_find_executable_not_found_none():
 def write_executable(shebang, filename='run'):
     os.mkdir('bin')
     path = os.path.join('bin', filename)
-    with io.open(path, 'w') as f:
-        f.write('#!{}'.format(shebang))
+    with open(path, 'w') as f:
+        f.write(f'#!{shebang}')
     make_executable(path)
     return path

@@ -106,7 +107,7 @@ def test_normexe_is_a_directory(tmpdir):
     with pytest.raises(OSError) as excinfo:
         parse_shebang.normexe(exe)
     msg, = excinfo.value.args
-    assert msg == 'Executable `{}` is a directory'.format(exe)
+    assert msg == f'Executable `{exe}` is a directory'


 def test_normexe_already_full_path():
@@ -114,30 +115,29 @@ def test_normexe_already_full_path():


 def test_normexe_gives_full_path():
-    expected = distutils.spawn.find_executable('echo')
-    assert parse_shebang.normexe('echo') == expected
-    assert os.sep in expected
+    assert parse_shebang.normexe('echo') == _echo_exe()
+    assert os.sep in _echo_exe()


 def test_normalize_cmd_trivial():
-    cmd = (distutils.spawn.find_executable('echo'), 'hi')
+    cmd = (_echo_exe(), 'hi')
     assert parse_shebang.normalize_cmd(cmd) == cmd


 def test_normalize_cmd_PATH():
     cmd = ('echo', '--version')
-    expected = (distutils.spawn.find_executable('echo'), '--version')
+    expected = (_echo_exe(), '--version')
     assert parse_shebang.normalize_cmd(cmd) == expected


 def test_normalize_cmd_shebang(in_tmpdir):
-    echo = distutils.spawn.find_executable('echo').replace(os.sep, '/')
+    echo = _echo_exe().replace(os.sep, '/')
     path = write_executable(echo)
     assert parse_shebang.normalize_cmd((path,)) == (echo, path)


 def test_normalize_cmd_PATH_shebang_full_path(in_tmpdir):
-    echo = distutils.spawn.find_executable('echo').replace(os.sep, '/')
+    echo = _echo_exe().replace(os.sep, '/')
     path = write_executable(echo)
     with bin_on_path():
         ret = parse_shebang.normalize_cmd(('run',))
@@ -145,7 +145,7 @@ def test_normalize_cmd_PATH_shebang_full_path(in_tmpdir):


 def test_normalize_cmd_PATH_shebang_PATH(in_tmpdir):
-    echo = distutils.spawn.find_executable('echo')
+    echo = _echo_exe()
     path = write_executable('/usr/bin/env echo')
     with bin_on_path():
         ret = parse_shebang.normalize_cmd(('run',))

@@ -1,5 +1,3 @@
-from __future__ import unicode_literals
-
 import os.path

 import pytest

@@ -1,13 +1,12 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 import os.path
 import re
 import shutil
 import sys
+from typing import Any
+from typing import Dict
+from unittest import mock

 import cfgv
-import mock
 import pytest

 import pre_commit.constants as C
@@ -312,7 +311,7 @@ def test_golang_hook(tempdir_factory, store):

 def test_golang_hook_still_works_when_gobin_is_set(tempdir_factory, store):
     gobin_dir = tempdir_factory.get()
-    with envcontext([('GOBIN', gobin_dir)]):
+    with envcontext((('GOBIN', gobin_dir),)):
         test_golang_hook(tempdir_factory, store)
     assert os.listdir(gobin_dir) == []

@@ -473,7 +472,7 @@ def _norm_pwd(path):
     # Under windows bash's temp and windows temp is different.
     # This normalizes to the bash /tmp
     return cmd_output_b(
-        'bash', '-c', "cd '{}' && pwd".format(path),
+        'bash', '-c', f"cd '{path}' && pwd",
     )[1].strip()


@@ -766,7 +765,7 @@ def test_local_python_repo(store, local_python_config):


 def test_default_language_version(store, local_python_config):
-    config = {
+    config: Dict[str, Any] = {
         'default_language_version': {'python': 'fake'},
         'default_stages': ['commit'],
         'repos': [local_python_config],
@@ -783,7 +782,7 @@ def test_default_language_version(store, local_python_config):


 def test_default_stages(store, local_python_config):
-    config = {
+    config: Dict[str, Any] = {
         'default_language_version': {'python': C.DEFAULT},
         'default_stages': ['commit'],
         'repos': [local_python_config],
@@ -844,7 +843,7 @@ def test_manifest_hooks(tempdir_factory, store):
     hook = _get_hook(config, store, 'bash_hook')

     assert hook == Hook(
-        src='file://{}'.format(path),
+        src=f'file://{path}',
         prefix=Prefix(mock.ANY),
         additional_dependencies=[],
         alias='',

@@ -1,8 +1,3 @@
-# -*- coding: UTF-8 -*-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-import io
 import itertools
 import os.path
 import shutil
@@ -29,7 +24,8 @@ def patch_dir(tempdir_factory):

 def get_short_git_status():
     git_status = cmd_output('git', 'status', '-s')[1]
-    return dict(reversed(line.split()) for line in git_status.splitlines())
+    line_parts = [line.split() for line in git_status.splitlines()]
+    return {v: k for k, v in line_parts}


 @pytest.fixture
@@ -47,7 +43,7 @@ def _test_foo_state(
         encoding='UTF-8',
 ):
     assert os.path.exists(path.foo_filename)
-    with io.open(path.foo_filename, encoding=encoding) as f:
+    with open(path.foo_filename, encoding=encoding) as f:
         assert f.read() == foo_contents
     actual_status = get_short_git_status()['foo']
     assert status == actual_status
@@ -64,7 +60,7 @@ def test_foo_nothing_unstaged(foo_staged, patch_dir):


 def test_foo_something_unstaged(foo_staged, patch_dir):
-    with io.open(foo_staged.foo_filename, 'w') as foo_file:
+    with open(foo_staged.foo_filename, 'w') as foo_file:
         foo_file.write('herp\nderp\n')

     _test_foo_state(foo_staged, 'herp\nderp\n', 'AM')
@@ -76,7 +72,7 @@ def test_foo_something_unstaged(foo_staged, patch_dir):


 def test_does_not_crash_patch_dir_does_not_exist(foo_staged, patch_dir):
-    with io.open(foo_staged.foo_filename, 'w') as foo_file:
+    with open(foo_staged.foo_filename, 'w') as foo_file:
         foo_file.write('hello\nworld\n')

     shutil.rmtree(patch_dir)
@@ -97,7 +93,7 @@ def test_foo_something_unstaged_diff_color_always(foo_staged, patch_dir):


 def test_foo_both_modify_non_conflicting(foo_staged, patch_dir):
-    with io.open(foo_staged.foo_filename, 'w') as foo_file:
+    with open(foo_staged.foo_filename, 'w') as foo_file:
         foo_file.write(FOO_CONTENTS + '9\n')

     _test_foo_state(foo_staged, FOO_CONTENTS + '9\n', 'AM')
@@ -106,7 +102,7 @@ def test_foo_both_modify_non_conflicting(foo_staged, patch_dir):
         _test_foo_state(foo_staged)

         # Modify the file as part of the "pre-commit"
-        with io.open(foo_staged.foo_filename, 'w') as foo_file:
+        with open(foo_staged.foo_filename, 'w') as foo_file:
             foo_file.write(FOO_CONTENTS.replace('1', 'a'))

         _test_foo_state(foo_staged, FOO_CONTENTS.replace('1', 'a'), 'AM')
@@ -115,7 +111,7 @@ def test_foo_both_modify_non_conflicting(foo_staged, patch_dir):


 def test_foo_both_modify_conflicting(foo_staged, patch_dir):
-    with io.open(foo_staged.foo_filename, 'w') as foo_file:
+    with open(foo_staged.foo_filename, 'w') as foo_file:
         foo_file.write(FOO_CONTENTS.replace('1', 'a'))

     _test_foo_state(foo_staged, FOO_CONTENTS.replace('1', 'a'), 'AM')
@@ -124,7 +120,7 @@ def test_foo_both_modify_conflicting(foo_staged, patch_dir):
         _test_foo_state(foo_staged)

         # Modify in the same place as the stashed diff
-        with io.open(foo_staged.foo_filename, 'w') as foo_file:
+        with open(foo_staged.foo_filename, 'w') as foo_file:
             foo_file.write(FOO_CONTENTS.replace('1', 'b'))

         _test_foo_state(foo_staged, FOO_CONTENTS.replace('1', 'b'), 'AM')
@@ -142,8 +138,8 @@ def img_staged(in_git_dir):

 def _test_img_state(path, expected_file='img1.jpg', status='A'):
     assert os.path.exists(path.img_filename)
-    with io.open(path.img_filename, 'rb') as f1:
-        with io.open(get_resource_path(expected_file), 'rb') as f2:
+    with open(path.img_filename, 'rb') as f1:
+        with open(get_resource_path(expected_file), 'rb') as f2:
             assert f1.read() == f2.read()
     actual_status = get_short_git_status()['img.jpg']
     assert status == actual_status
@@ -248,7 +244,7 @@ def test_sub_something_unstaged(sub_staged, patch_dir):

 def test_stage_utf8_changes(foo_staged, patch_dir):
     contents = '\u2603'
-    with io.open('foo', 'w', encoding='UTF-8') as foo_file:
+    with open('foo', 'w', encoding='UTF-8') as foo_file:
         foo_file.write(contents)

     _test_foo_state(foo_staged, contents, 'AM')
@@ -260,7 +256,7 @@ def test_stage_utf8_changes(foo_staged, patch_dir):
 def test_stage_non_utf8_changes(foo_staged, patch_dir):
     contents = 'ú'
     # Produce a latin-1 diff
-    with io.open('foo', 'w', encoding='latin-1') as foo_file:
+    with open('foo', 'w', encoding='latin-1') as foo_file:
         foo_file.write(contents)

     _test_foo_state(foo_staged, contents, 'AM', encoding='latin-1')
@@ -282,14 +278,14 @@ def test_non_utf8_conflicting_diff(foo_staged, patch_dir):
     # Previously, the error message (though discarded immediately) was being
     # decoded with the UTF-8 codec (causing a crash)
     contents = 'ú \n'
-    with io.open('foo', 'w', encoding='latin-1') as foo_file:
+    with open('foo', 'w', encoding='latin-1') as foo_file:
         foo_file.write(contents)

     _test_foo_state(foo_staged, contents, 'AM', encoding='latin-1')
     with staged_files_only(patch_dir):
         _test_foo_state(foo_staged)
         # Create a conflicting diff that will need to be rolled back
-        with io.open('foo', 'w') as foo_file:
+        with open('foo', 'w') as foo_file:
             foo_file.write('')
     _test_foo_state(foo_staged, contents, 'AM', encoding='latin-1')

@@ -1,13 +1,8 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-import io
 import os.path
 import sqlite3
+from unittest import mock

-import mock
 import pytest
-import six

 from pre_commit import git
 from pre_commit.store import _get_default_directory
@@ -53,7 +48,7 @@ def test_store_init(store):
     # Should create the store directory
     assert os.path.exists(store.directory)
     # Should create a README file indicating what the directory is about
-    with io.open(os.path.join(store.directory, 'README')) as readme_file:
+    with open(os.path.join(store.directory, 'README')) as readme_file:
         readme_contents = readme_file.read()
     for text_line in (
         'This directory is maintained by the pre-commit project.',
@@ -93,7 +88,7 @@ def test_clone_cleans_up_on_checkout_failure(store):
         # This raises an exception because you can't clone something that
        # doesn't exist!
         store.clone('/i_dont_exist_lol', 'fake_rev')
-    assert '/i_dont_exist_lol' in six.text_type(excinfo.value)
+    assert '/i_dont_exist_lol' in str(excinfo.value)

     repo_dirs = [
         d for d in os.listdir(store.directory) if d.startswith('repo')
@@ -125,7 +120,7 @@ def test_clone_shallow_failure_fallback_to_complete(

     # Force shallow clone failure
     def fake_shallow_clone(self, *args, **kwargs):
-        raise CalledProcessError(None, None, None, None, None)
+        raise CalledProcessError(1, (), 0, b'', None)
     store._shallow_clone = fake_shallow_clone

     ret = store.clone(path, rev)

@@ -1,5 +1,3 @@
-from __future__ import unicode_literals
-
 import os.path
 import stat
 import subprocess
@@ -17,9 +15,9 @@ from pre_commit.util import tmpdir


 def test_CalledProcessError_str():
-    error = CalledProcessError(1, [str('exe')], 0, b'output', b'errors')
+    error = CalledProcessError(1, ('exe',), 0, b'output', b'errors')
     assert str(error) == (
-        "command: ['exe']\n"
+        "command: ('exe',)\n"
         'return code: 1\n'
         'expected return code: 0\n'
         'stdout:\n'
@@ -30,9 +28,9 @@ def test_CalledProcessError_str():


 def test_CalledProcessError_str_nooutput():
-    error = CalledProcessError(1, [str('exe')], 0, b'', b'')
+    error = CalledProcessError(1, ('exe',), 0, b'', b'')
     assert str(error) == (
-        "command: ['exe']\n"
+        "command: ('exe',)\n"
         'return code: 1\n'
         'expected return code: 0\n'
         'stdout: (none)\n'

@@ -1,15 +1,11 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 import concurrent.futures
 import os
 import sys
 import time
+from typing import Tuple
+from unittest import mock

-import mock
 import pytest
-import six

 from pre_commit import parse_shebang
 from pre_commit import xargs
@@ -30,19 +26,10 @@ def test_environ_size(env, expected):


 @pytest.fixture
-def win32_py2_mock():
+def win32_mock():
     with mock.patch.object(sys, 'getfilesystemencoding', return_value='utf-8'):
         with mock.patch.object(sys, 'platform', 'win32'):
-            with mock.patch.object(six, 'PY2', True):
-                yield
-
-
-@pytest.fixture
-def win32_py3_mock():
-    with mock.patch.object(sys, 'getfilesystemencoding', return_value='utf-8'):
-        with mock.patch.object(sys, 'platform', 'win32'):
-            with mock.patch.object(six, 'PY2', False):
-                yield
+            yield


 @pytest.fixture
@@ -82,7 +69,7 @@ def test_partition_limits():
     )


-def test_partition_limit_win32_py3(win32_py3_mock):
+def test_partition_limit_win32(win32_mock):
     cmd = ('ninechars',)
     # counted as half because of utf-16 encode
     varargs = ('😑' * 5,)
@@ -90,13 +77,6 @@ def test_partition_limit_win32_py3(win32_py3_mock):
     assert ret == (cmd + varargs,)


-def test_partition_limit_win32_py2(win32_py2_mock):
-    cmd = ('ninechars',)
-    varargs = ('😑' * 5,)  # 4 bytes * 5
-    ret = xargs.partition(cmd, varargs, 1, _max_length=31)
-    assert ret == (cmd + varargs,)
-
-
 def test_partition_limit_linux(linux_mock):
     cmd = ('ninechars',)
     varargs = ('😑' * 5,)
@@ -187,9 +167,8 @@ def test_xargs_concurrency():

 def test_thread_mapper_concurrency_uses_threadpoolexecutor_map():
     with xargs._thread_mapper(10) as thread_map:
-        assert isinstance(
-            thread_map.__self__, concurrent.futures.ThreadPoolExecutor,
-        ) is True
+        _self = thread_map.__self__  # type: ignore
+        assert isinstance(_self, concurrent.futures.ThreadPoolExecutor)


 def test_thread_mapper_concurrency_uses_regular_map():
@@ -199,7 +178,7 @@ def test_thread_mapper_concurrency_uses_regular_map():

 def test_xargs_propagate_kwargs_to_cmd():
     env = {'PRE_COMMIT_TEST_VAR': 'Pre commit is awesome'}
-    cmd = ('bash', '-c', 'echo $PRE_COMMIT_TEST_VAR', '--')
+    cmd: Tuple[str, ...] = ('bash', '-c', 'echo $PRE_COMMIT_TEST_VAR', '--')
     cmd = parse_shebang.normalize_cmd(cmd)

     ret, stdout = xargs.xargs(cmd, ('1',), env=env)
Some files were not shown because too many files have changed in this diff.