Merge pull request #1277 from pre-commit/pyupgrade

Drop python 2 / python3.5 support in pre-commit
Anthony Sottile, 2020-01-12 10:41:30 -08:00 (committed by GitHub)
commit b2faf339ce
101 changed files with 1280 additions and 1040 deletions

@@ -25,6 +25,10 @@ exclude_lines =
 ^\s*return NotImplemented\b
 ^\s*raise$
+# Ignore typing-related things
+^if (False|TYPE_CHECKING):
+: \.\.\.$
 # Don't complain if non-runnable code isn't run:
 ^if __name__ == ['"]__main__['"]:$

.gitignore

@@ -1,14 +1,8 @@
 *.egg-info
-*.iml
 *.py[co]
-.*.sw[a-z]
-.coverage
-.idea
-.project
-.pydevproject
-.tox
-.venv.touch
+/.coverage
+/.mypy_cache
+/.pytest_cache
+/.tox
+/dist
 /venv*
-coverage-html
-dist
-.pytest_cache

@ -1,6 +1,6 @@
repos: repos:
- repo: https://github.com/pre-commit/pre-commit-hooks - repo: https://github.com/pre-commit/pre-commit-hooks
rev: v2.1.0 rev: v2.4.0
hooks: hooks:
- id: trailing-whitespace - id: trailing-whitespace
- id: end-of-file-fixer - id: end-of-file-fixer
@ -12,30 +12,41 @@ repos:
- id: requirements-txt-fixer - id: requirements-txt-fixer
- id: double-quote-string-fixer - id: double-quote-string-fixer
- repo: https://gitlab.com/pycqa/flake8 - repo: https://gitlab.com/pycqa/flake8
rev: 3.7.7 rev: 3.7.9
hooks: hooks:
- id: flake8 - id: flake8
- repo: https://github.com/pre-commit/mirrors-autopep8 - repo: https://github.com/pre-commit/mirrors-autopep8
rev: v1.4.3 rev: v1.4.4
hooks: hooks:
- id: autopep8 - id: autopep8
- repo: https://github.com/pre-commit/pre-commit - repo: https://github.com/pre-commit/pre-commit
rev: v1.14.4 rev: v1.21.0
hooks: hooks:
- id: validate_manifest - id: validate_manifest
- repo: https://github.com/asottile/pyupgrade - repo: https://github.com/asottile/pyupgrade
rev: v1.12.0 rev: v1.25.3
hooks: hooks:
- id: pyupgrade - id: pyupgrade
args: [--py36-plus]
- repo: https://github.com/asottile/reorder_python_imports - repo: https://github.com/asottile/reorder_python_imports
rev: v1.4.0 rev: v1.9.0
hooks: hooks:
- id: reorder-python-imports - id: reorder-python-imports
language_version: python3 args: [--py3-plus]
- repo: https://github.com/asottile/add-trailing-comma - repo: https://github.com/asottile/add-trailing-comma
rev: v1.0.0 rev: v1.5.0
hooks: hooks:
- id: add-trailing-comma - id: add-trailing-comma
args: [--py36-plus]
- repo: https://github.com/asottile/setup-cfg-fmt
rev: v1.6.0
hooks:
- id: setup-cfg-fmt
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v0.761
hooks:
- id: mypy
exclude: ^testing/resources/
- repo: meta - repo: meta
hooks: hooks:
- id: check-hooks-apply - id: check-hooks-apply
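
Most of the rest of this diff is the mechanical fallout of the two new flags above (`--py36-plus` for pyupgrade, `--py3-plus` for reorder-python-imports) plus the new mypy hook. As a rough illustration only (the class and method names here are made up, not pre-commit code), the pyupgrade rewrite looks like:

# Before (Python 2 compatible):
class Example(object):
    label = 'demo'

    def describe(self, name):
        return '{}: {}'.format(self.label, name)

# After `pyupgrade --py36-plus`: the redundant `object` base class goes away
# and str.format becomes an f-string; the `-> str` style annotations seen
# throughout this diff were added by hand on top of that.
class Example:
    label = 'demo'

    def describe(self, name):
        return f'{self.label}: {name}'

assert Example().describe('x') == 'demo: x'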

@ -10,18 +10,17 @@ resources:
type: github type: github
endpoint: github endpoint: github
name: asottile/azure-pipeline-templates name: asottile/azure-pipeline-templates
ref: refs/tags/v0.0.15 ref: refs/tags/v1.0.0
jobs: jobs:
- template: job--pre-commit.yml@asottile - template: job--pre-commit.yml@asottile
- template: job--python-tox.yml@asottile - template: job--python-tox.yml@asottile
parameters: parameters:
toxenvs: [py27, py37] toxenvs: [py37]
os: windows os: windows
additional_variables: additional_variables:
COVERAGE_IGNORE_WINDOWS: '# pragma: windows no cover' COVERAGE_IGNORE_WINDOWS: '# pragma: windows no cover'
TOX_TESTENV_PASSENV: COVERAGE_IGNORE_WINDOWS TOX_TESTENV_PASSENV: COVERAGE_IGNORE_WINDOWS
TEMP: C:\Temp # remove when dropping python2
pre_test: pre_test:
- powershell: Write-Host "##vso[task.prependpath]$env:CONDA\Scripts" - powershell: Write-Host "##vso[task.prependpath]$env:CONDA\Scripts"
displayName: Add conda to PATH displayName: Add conda to PATH
@ -39,7 +38,7 @@ jobs:
displayName: install swift displayName: install swift
- template: job--python-tox.yml@asottile - template: job--python-tox.yml@asottile
parameters: parameters:
toxenvs: [pypy, pypy3, py27, py36, py37, py38] toxenvs: [pypy3, py36, py37, py38]
os: linux os: linux
pre_test: pre_test:
- task: UseRubyVersion@0 - task: UseRubyVersion@0

@ -1,5 +1,3 @@
from __future__ import absolute_import
from pre_commit.main import main from pre_commit.main import main

@ -1,11 +1,12 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import argparse import argparse
import functools import functools
import logging import logging
import pipes import pipes
import sys import sys
from typing import Any
from typing import Dict
from typing import Optional
from typing import Sequence
import cfgv import cfgv
from aspy.yaml import ordered_load from aspy.yaml import ordered_load
@ -21,7 +22,7 @@ logger = logging.getLogger('pre_commit')
check_string_regex = cfgv.check_and(cfgv.check_string, cfgv.check_regex) check_string_regex = cfgv.check_and(cfgv.check_string, cfgv.check_regex)
def check_type_tag(tag): def check_type_tag(tag: str) -> None:
if tag not in ALL_TAGS: if tag not in ALL_TAGS:
raise cfgv.ValidationError( raise cfgv.ValidationError(
'Type tag {!r} is not recognized. ' 'Type tag {!r} is not recognized. '
@ -29,7 +30,7 @@ def check_type_tag(tag):
) )
def check_min_version(version): def check_min_version(version: str) -> None:
if parse_version(version) > parse_version(C.VERSION): if parse_version(version) > parse_version(C.VERSION):
raise cfgv.ValidationError( raise cfgv.ValidationError(
'pre-commit version {} is required but version {} is installed. ' 'pre-commit version {} is required but version {} is installed. '
@ -39,7 +40,7 @@ def check_min_version(version):
) )
def _make_argparser(filenames_help): def _make_argparser(filenames_help: str) -> argparse.ArgumentParser:
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*', help=filenames_help) parser.add_argument('filenames', nargs='*', help=filenames_help)
parser.add_argument('-V', '--version', action='version', version=C.VERSION) parser.add_argument('-V', '--version', action='version', version=C.VERSION)
@ -89,7 +90,7 @@ load_manifest = functools.partial(
) )
def validate_manifest_main(argv=None): def validate_manifest_main(argv: Optional[Sequence[str]] = None) -> int:
parser = _make_argparser('Manifest filenames.') parser = _make_argparser('Manifest filenames.')
args = parser.parse_args(argv) args = parser.parse_args(argv)
ret = 0 ret = 0
@ -106,11 +107,11 @@ LOCAL = 'local'
META = 'meta' META = 'meta'
class MigrateShaToRev(object): class MigrateShaToRev:
key = 'rev' key = 'rev'
@staticmethod @staticmethod
def _cond(key): def _cond(key: str) -> cfgv.Conditional:
return cfgv.Conditional( return cfgv.Conditional(
key, cfgv.check_string, key, cfgv.check_string,
condition_key='repo', condition_key='repo',
@ -118,7 +119,7 @@ class MigrateShaToRev(object):
ensure_absent=True, ensure_absent=True,
) )
def check(self, dct): def check(self, dct: Dict[str, Any]) -> None:
if dct.get('repo') in {LOCAL, META}: if dct.get('repo') in {LOCAL, META}:
self._cond('rev').check(dct) self._cond('rev').check(dct)
self._cond('sha').check(dct) self._cond('sha').check(dct)
@ -129,14 +130,14 @@ class MigrateShaToRev(object):
else: else:
self._cond('rev').check(dct) self._cond('rev').check(dct)
def apply_default(self, dct): def apply_default(self, dct: Dict[str, Any]) -> None:
if 'sha' in dct: if 'sha' in dct:
dct['rev'] = dct.pop('sha') dct['rev'] = dct.pop('sha')
remove_default = cfgv.Required.remove_default remove_default = cfgv.Required.remove_default
def _entry(modname): def _entry(modname: str) -> str:
"""the hook `entry` is passed through `shlex.split()` by the command """the hook `entry` is passed through `shlex.split()` by the command
runner, so to prevent issues with spaces and backslashes (on Windows) runner, so to prevent issues with spaces and backslashes (on Windows)
it must be quoted here. it must be quoted here.
@ -146,13 +147,21 @@ def _entry(modname):
) )
def warn_unknown_keys_root(extra, orig_keys, dct): def warn_unknown_keys_root(
extra: Sequence[str],
orig_keys: Sequence[str],
dct: Dict[str, str],
) -> None:
logger.warning( logger.warning(
'Unexpected key(s) present at root: {}'.format(', '.join(extra)), 'Unexpected key(s) present at root: {}'.format(', '.join(extra)),
) )
def warn_unknown_keys_repo(extra, orig_keys, dct): def warn_unknown_keys_repo(
extra: Sequence[str],
orig_keys: Sequence[str],
dct: Dict[str, str],
) -> None:
logger.warning( logger.warning(
'Unexpected key(s) present on {}: {}'.format( 'Unexpected key(s) present on {}: {}'.format(
dct['repo'], ', '.join(extra), dct['repo'], ', '.join(extra),
@ -202,7 +211,7 @@ META_HOOK_DICT = cfgv.Map(
if item.key in {'name', 'language', 'entry'} else if item.key in {'name', 'language', 'entry'} else
item item
for item in MANIFEST_HOOK_DICT.items for item in MANIFEST_HOOK_DICT.items
]) ]),
) )
CONFIG_HOOK_DICT = cfgv.Map( CONFIG_HOOK_DICT = cfgv.Map(
'Hook', 'id', 'Hook', 'id',
@ -217,7 +226,7 @@ CONFIG_HOOK_DICT = cfgv.Map(
cfgv.OptionalNoDefault(item.key, item.check_fn) cfgv.OptionalNoDefault(item.key, item.check_fn)
for item in MANIFEST_HOOK_DICT.items for item in MANIFEST_HOOK_DICT.items
if item.key != 'id' if item.key != 'id'
] ],
) )
CONFIG_REPO_DICT = cfgv.Map( CONFIG_REPO_DICT = cfgv.Map(
'Repository', 'repo', 'Repository', 'repo',
@ -243,7 +252,7 @@ CONFIG_REPO_DICT = cfgv.Map(
DEFAULT_LANGUAGE_VERSION = cfgv.Map( DEFAULT_LANGUAGE_VERSION = cfgv.Map(
'DefaultLanguageVersion', None, 'DefaultLanguageVersion', None,
cfgv.NoAdditionalKeys(all_languages), cfgv.NoAdditionalKeys(all_languages),
*[cfgv.Optional(x, cfgv.check_string, C.DEFAULT) for x in all_languages] *[cfgv.Optional(x, cfgv.check_string, C.DEFAULT) for x in all_languages],
) )
CONFIG_SCHEMA = cfgv.Map( CONFIG_SCHEMA = cfgv.Map(
'Config', None, 'Config', None,
@ -284,7 +293,7 @@ class InvalidConfigError(FatalError):
pass pass
def ordered_load_normalize_legacy_config(contents): def ordered_load_normalize_legacy_config(contents: str) -> Dict[str, Any]:
data = ordered_load(contents) data = ordered_load(contents)
if isinstance(data, list): if isinstance(data, list):
# TODO: Once happy, issue a deprecation warning and instructions # TODO: Once happy, issue a deprecation warning and instructions
@ -301,7 +310,7 @@ load_config = functools.partial(
) )
def validate_config_main(argv=None): def validate_config_main(argv: Optional[Sequence[str]] = None) -> int:
parser = _make_argparser('Config filenames.') parser = _make_argparser('Config filenames.')
args = parser.parse_args(argv) args = parser.parse_args(argv)
ret = 0 ret = 0
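
The `_entry` docstring above explains that the generated `entry` later goes through `shlex.split()`, which is why the interpreter path must be quoted. A self-contained illustration, assuming a made-up Windows interpreter path and hook module name:

import shlex

exe = 'C:\\Program Files\\Python38\\python.exe'          # hypothetical path
entry = f'{shlex.quote(exe)} -m pre_commit.meta_hooks.example_hook'
# Unquoted, shlex.split() would both split on the space and strip the
# backslashes; quoted, the path survives the round trip intact:
assert shlex.split(entry)[0] == exe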

@ -1,14 +1,12 @@
from __future__ import unicode_literals
import os import os
import sys import sys
terminal_supports_color = True terminal_supports_color = True
if os.name == 'nt': # pragma: no cover (windows) if sys.platform == 'win32': # pragma: no cover (windows)
from pre_commit.color_windows import enable_virtual_terminal_processing from pre_commit.color_windows import enable_virtual_terminal_processing
try: try:
enable_virtual_terminal_processing() enable_virtual_terminal_processing()
except WindowsError: except OSError:
terminal_supports_color = False terminal_supports_color = False
RED = '\033[41m' RED = '\033[41m'
@ -23,7 +21,7 @@ class InvalidColorSetting(ValueError):
pass pass
def format_color(text, color, use_color_setting): def format_color(text: str, color: str, use_color_setting: bool) -> str:
"""Format text with color. """Format text with color.
Args: Args:
@ -34,13 +32,13 @@ def format_color(text, color, use_color_setting):
if not use_color_setting: if not use_color_setting:
return text return text
else: else:
return '{}{}{}'.format(color, text, NORMAL) return f'{color}{text}{NORMAL}'
COLOR_CHOICES = ('auto', 'always', 'never') COLOR_CHOICES = ('auto', 'always', 'never')
def use_color(setting): def use_color(setting: str) -> bool:
"""Choose whether to use color based on the command argument. """Choose whether to use color based on the command argument.
Args: Args:
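
The `except WindowsError` to `except OSError` change above works because Python 3 collapsed the old exception aliases: `IOError`, `EnvironmentError` and (on Windows) `WindowsError` are now just names for `OSError`, so a single handler covers every case. A quick check:

# Python 3 only: the legacy names are plain aliases of OSError.
assert IOError is OSError
assert EnvironmentError is OSError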

@ -1,13 +1,14 @@
from __future__ import absolute_import import sys
from __future__ import unicode_literals assert sys.platform == 'win32'
from ctypes import POINTER # noqa: E402
from ctypes import windll # noqa: E402
from ctypes import WinError # noqa: E402
from ctypes import WINFUNCTYPE # noqa: E402
from ctypes.wintypes import BOOL # noqa: E402
from ctypes.wintypes import DWORD # noqa: E402
from ctypes.wintypes import HANDLE # noqa: E402
from ctypes import POINTER
from ctypes import windll
from ctypes import WinError
from ctypes import WINFUNCTYPE
from ctypes.wintypes import BOOL
from ctypes.wintypes import DWORD
from ctypes.wintypes import HANDLE
STD_OUTPUT_HANDLE = -11 STD_OUTPUT_HANDLE = -11
ENABLE_VIRTUAL_TERMINAL_PROCESSING = 4 ENABLE_VIRTUAL_TERMINAL_PROCESSING = 4
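
The new module-level `assert sys.platform == 'win32'` above lines up with the mypy hook added to the config: mypy narrows on `sys.platform`, so Windows-only `ctypes` names resolve against the win32 stubs instead of erroring when checked on Linux. A sketch of the same idea (my own example, not pre-commit code):

import sys

if sys.platform == 'win32':
    # mypy only type-checks this branch for the win32 platform, so names that
    # exist solely in the Windows stubs (e.g. ctypes.windll) are accepted here.
    from ctypes import windll
    kernel32 = windll.kernel32
else:
    kernel32 = None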

@ -1,11 +1,13 @@
from __future__ import print_function
from __future__ import unicode_literals
import collections
import os.path import os.path
import re import re
from typing import Any
from typing import Dict
from typing import List
from typing import NamedTuple
from typing import Optional
from typing import Sequence
from typing import Tuple
import six
from aspy.yaml import ordered_dump from aspy.yaml import ordered_dump
from aspy.yaml import ordered_load from aspy.yaml import ordered_load
@ -18,20 +20,23 @@ from pre_commit.clientlib import load_manifest
from pre_commit.clientlib import LOCAL from pre_commit.clientlib import LOCAL
from pre_commit.clientlib import META from pre_commit.clientlib import META
from pre_commit.commands.migrate_config import migrate_config from pre_commit.commands.migrate_config import migrate_config
from pre_commit.store import Store
from pre_commit.util import CalledProcessError from pre_commit.util import CalledProcessError
from pre_commit.util import cmd_output from pre_commit.util import cmd_output
from pre_commit.util import cmd_output_b from pre_commit.util import cmd_output_b
from pre_commit.util import tmpdir from pre_commit.util import tmpdir
class RevInfo(collections.namedtuple('RevInfo', ('repo', 'rev', 'frozen'))): class RevInfo(NamedTuple):
__slots__ = () repo: str
rev: str
frozen: Optional[str]
@classmethod @classmethod
def from_config(cls, config): def from_config(cls, config: Dict[str, Any]) -> 'RevInfo':
return cls(config['repo'], config['rev'], None) return cls(config['repo'], config['rev'], None)
def update(self, tags_only, freeze): def update(self, tags_only: bool, freeze: bool) -> 'RevInfo':
if tags_only: if tags_only:
tag_cmd = ('git', 'describe', 'FETCH_HEAD', '--tags', '--abbrev=0') tag_cmd = ('git', 'describe', 'FETCH_HEAD', '--tags', '--abbrev=0')
else: else:
@ -59,12 +64,16 @@ class RepositoryCannotBeUpdatedError(RuntimeError):
pass pass
def _check_hooks_still_exist_at_rev(repo_config, info, store): def _check_hooks_still_exist_at_rev(
repo_config: Dict[str, Any],
info: RevInfo,
store: Store,
) -> None:
try: try:
path = store.clone(repo_config['repo'], info.rev) path = store.clone(repo_config['repo'], info.rev)
manifest = load_manifest(os.path.join(path, C.MANIFEST_FILE)) manifest = load_manifest(os.path.join(path, C.MANIFEST_FILE))
except InvalidManifestError as e: except InvalidManifestError as e:
raise RepositoryCannotBeUpdatedError(six.text_type(e)) raise RepositoryCannotBeUpdatedError(str(e))
# See if any of our hooks were deleted with the new commits # See if any of our hooks were deleted with the new commits
hooks = {hook['id'] for hook in repo_config['hooks']} hooks = {hook['id'] for hook in repo_config['hooks']}
@ -80,7 +89,11 @@ REV_LINE_RE = re.compile(r'^(\s+)rev:(\s*)([^\s#]+)(.*)(\r?\n)$', re.DOTALL)
REV_LINE_FMT = '{}rev:{}{}{}{}' REV_LINE_FMT = '{}rev:{}{}{}{}'
def _original_lines(path, rev_infos, retry=False): def _original_lines(
path: str,
rev_infos: List[Optional[RevInfo]],
retry: bool = False,
) -> Tuple[List[str], List[int]]:
"""detect `rev:` lines or reformat the file""" """detect `rev:` lines or reformat the file"""
with open(path) as f: with open(path) as f:
original = f.read() original = f.read()
@ -97,7 +110,7 @@ def _original_lines(path, rev_infos, retry=False):
return _original_lines(path, rev_infos, retry=True) return _original_lines(path, rev_infos, retry=True)
def _write_new_config(path, rev_infos): def _write_new_config(path: str, rev_infos: List[Optional[RevInfo]]) -> None:
lines, idxs = _original_lines(path, rev_infos) lines, idxs = _original_lines(path, rev_infos)
for idx, rev_info in zip(idxs, rev_infos): for idx, rev_info in zip(idxs, rev_infos):
@ -108,7 +121,7 @@ def _write_new_config(path, rev_infos):
new_rev_s = ordered_dump({'rev': rev_info.rev}, **C.YAML_DUMP_KWARGS) new_rev_s = ordered_dump({'rev': rev_info.rev}, **C.YAML_DUMP_KWARGS)
new_rev = new_rev_s.split(':', 1)[1].strip() new_rev = new_rev_s.split(':', 1)[1].strip()
if rev_info.frozen is not None: if rev_info.frozen is not None:
comment = ' # frozen: {}'.format(rev_info.frozen) comment = f' # frozen: {rev_info.frozen}'
elif match.group(4).strip().startswith('# frozen:'): elif match.group(4).strip().startswith('# frozen:'):
comment = '' comment = ''
else: else:
@ -121,11 +134,17 @@ def _write_new_config(path, rev_infos):
f.write(''.join(lines)) f.write(''.join(lines))
def autoupdate(config_file, store, tags_only, freeze, repos=()): def autoupdate(
config_file: str,
store: Store,
tags_only: bool,
freeze: bool,
repos: Sequence[str] = (),
) -> int:
"""Auto-update the pre-commit config to the latest versions of repos.""" """Auto-update the pre-commit config to the latest versions of repos."""
migrate_config(config_file, quiet=True) migrate_config(config_file, quiet=True)
retv = 0 retv = 0
rev_infos = [] rev_infos: List[Optional[RevInfo]] = []
changed = False changed = False
config = load_config(config_file) config = load_config(config_file)
@ -138,7 +157,7 @@ def autoupdate(config_file, store, tags_only, freeze, repos=()):
rev_infos.append(None) rev_infos.append(None)
continue continue
output.write('Updating {} ... '.format(info.repo)) output.write(f'Updating {info.repo} ... ')
new_info = info.update(tags_only=tags_only, freeze=freeze) new_info = info.update(tags_only=tags_only, freeze=freeze)
try: try:
_check_hooks_still_exist_at_rev(repo_config, new_info, store) _check_hooks_still_exist_at_rev(repo_config, new_info, store)
@ -151,10 +170,10 @@ def autoupdate(config_file, store, tags_only, freeze, repos=()):
if new_info.rev != info.rev: if new_info.rev != info.rev:
changed = True changed = True
if new_info.frozen: if new_info.frozen:
updated_to = '{} (frozen)'.format(new_info.frozen) updated_to = f'{new_info.frozen} (frozen)'
else: else:
updated_to = new_info.rev updated_to = new_info.rev
msg = 'updating {} -> {}.'.format(info.rev, updated_to) msg = f'updating {info.rev} -> {updated_to}.'
output.write_line(msg) output.write_line(msg)
rev_infos.append(new_info) rev_infos.append(new_info)
else: else:
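
`RevInfo` above is one instance of a pattern repeated across this commit: `collections.namedtuple` classes become `typing.NamedTuple` subclasses so the fields can carry annotations. Restated in isolation (the values below are illustrative only):

from typing import NamedTuple
from typing import Optional

class RevInfo(NamedTuple):
    repo: str
    rev: str
    frozen: Optional[str]

info = RevInfo('https://example.com/some-hooks', 'v1.0.0', None)
assert info._replace(frozen='deadbeef').frozen == 'deadbeef'   # namedtuple API intact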

@ -1,16 +1,14 @@
from __future__ import print_function
from __future__ import unicode_literals
import os.path import os.path
from pre_commit import output from pre_commit import output
from pre_commit.store import Store
from pre_commit.util import rmtree from pre_commit.util import rmtree
def clean(store): def clean(store: Store) -> int:
legacy_path = os.path.expanduser('~/.pre-commit') legacy_path = os.path.expanduser('~/.pre-commit')
for directory in (store.directory, legacy_path): for directory in (store.directory, legacy_path):
if os.path.exists(directory): if os.path.exists(directory):
rmtree(directory) rmtree(directory)
output.write_line('Cleaned {}.'.format(directory)) output.write_line(f'Cleaned {directory}.')
return 0 return 0

@ -1,7 +1,8 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import os.path import os.path
from typing import Any
from typing import Dict
from typing import Set
from typing import Tuple
import pre_commit.constants as C import pre_commit.constants as C
from pre_commit import output from pre_commit import output
@ -11,9 +12,15 @@ from pre_commit.clientlib import load_config
from pre_commit.clientlib import load_manifest from pre_commit.clientlib import load_manifest
from pre_commit.clientlib import LOCAL from pre_commit.clientlib import LOCAL
from pre_commit.clientlib import META from pre_commit.clientlib import META
from pre_commit.store import Store
def _mark_used_repos(store, all_repos, unused_repos, repo): def _mark_used_repos(
store: Store,
all_repos: Dict[Tuple[str, str], str],
unused_repos: Set[Tuple[str, str]],
repo: Dict[str, Any],
) -> None:
if repo['repo'] == META: if repo['repo'] == META:
return return
elif repo['repo'] == LOCAL: elif repo['repo'] == LOCAL:
@ -50,7 +57,7 @@ def _mark_used_repos(store, all_repos, unused_repos, repo):
)) ))
def _gc_repos(store): def _gc_repos(store: Store) -> int:
configs = store.select_all_configs() configs = store.select_all_configs()
repos = store.select_all_repos() repos = store.select_all_repos()
@ -76,8 +83,8 @@ def _gc_repos(store):
return len(unused_repos) return len(unused_repos)
def gc(store): def gc(store: Store) -> int:
with store.exclusive_lock(): with store.exclusive_lock():
repos_removed = _gc_repos(store) repos_removed = _gc_repos(store)
output.write_line('{} repo(s) removed.'.format(repos_removed)) output.write_line(f'{repos_removed} repo(s) removed.')
return 0 return 0

@ -1,14 +1,21 @@
import logging import logging
import os.path import os.path
from typing import Sequence
from pre_commit.commands.install_uninstall import install from pre_commit.commands.install_uninstall import install
from pre_commit.store import Store
from pre_commit.util import CalledProcessError from pre_commit.util import CalledProcessError
from pre_commit.util import cmd_output from pre_commit.util import cmd_output
logger = logging.getLogger('pre_commit') logger = logging.getLogger('pre_commit')
def init_templatedir(config_file, store, directory, hook_types): def init_templatedir(
config_file: str,
store: Store,
directory: str,
hook_types: Sequence[str],
) -> int:
install( install(
config_file, store, hook_types=hook_types, config_file, store, hook_types=hook_types,
overwrite=True, skip_on_missing_config=True, git_dir=directory, overwrite=True, skip_on_missing_config=True, git_dir=directory,
@ -23,5 +30,6 @@ def init_templatedir(config_file, store, directory, hook_types):
if configured_path != dest: if configured_path != dest:
logger.warning('`init.templateDir` not set to the target directory') logger.warning('`init.templateDir` not set to the target directory')
logger.warning( logger.warning(
'maybe `git config --global init.templateDir {}`?'.format(dest), f'maybe `git config --global init.templateDir {dest}`?',
) )
return 0

@ -1,18 +1,18 @@
from __future__ import print_function
from __future__ import unicode_literals
import io
import itertools import itertools
import logging import logging
import os.path import os.path
import shutil import shutil
import sys import sys
from typing import Optional
from typing import Sequence
from typing import Tuple
from pre_commit import git from pre_commit import git
from pre_commit import output from pre_commit import output
from pre_commit.clientlib import load_config from pre_commit.clientlib import load_config
from pre_commit.repository import all_hooks from pre_commit.repository import all_hooks
from pre_commit.repository import install_hook_envs from pre_commit.repository import install_hook_envs
from pre_commit.store import Store
from pre_commit.util import make_executable from pre_commit.util import make_executable
from pre_commit.util import mkdirp from pre_commit.util import mkdirp
from pre_commit.util import resource_text from pre_commit.util import resource_text
@ -33,21 +33,24 @@ TEMPLATE_START = '# start templated\n'
TEMPLATE_END = '# end templated\n' TEMPLATE_END = '# end templated\n'
def _hook_paths(hook_type, git_dir=None): def _hook_paths(
hook_type: str,
git_dir: Optional[str] = None,
) -> Tuple[str, str]:
git_dir = git_dir if git_dir is not None else git.get_git_dir() git_dir = git_dir if git_dir is not None else git.get_git_dir()
pth = os.path.join(git_dir, 'hooks', hook_type) pth = os.path.join(git_dir, 'hooks', hook_type)
return pth, '{}.legacy'.format(pth) return pth, f'{pth}.legacy'
def is_our_script(filename): def is_our_script(filename: str) -> bool:
if not os.path.exists(filename): # pragma: windows no cover (symlink) if not os.path.exists(filename): # pragma: windows no cover (symlink)
return False return False
with io.open(filename) as f: with open(filename) as f:
contents = f.read() contents = f.read()
return any(h in contents for h in (CURRENT_HASH,) + PRIOR_HASHES) return any(h in contents for h in (CURRENT_HASH,) + PRIOR_HASHES)
def shebang(): def shebang() -> str:
if sys.platform == 'win32': if sys.platform == 'win32':
py = 'python' py = 'python'
else: else:
@ -63,13 +66,16 @@ def shebang():
break break
else: else:
py = 'python' py = 'python'
return '#!/usr/bin/env {}'.format(py) return f'#!/usr/bin/env {py}'
def _install_hook_script( def _install_hook_script(
config_file, hook_type, config_file: str,
overwrite=False, skip_on_missing_config=False, git_dir=None, hook_type: str,
): overwrite: bool = False,
skip_on_missing_config: bool = False,
git_dir: Optional[str] = None,
) -> None:
hook_path, legacy_path = _hook_paths(hook_type, git_dir=git_dir) hook_path, legacy_path = _hook_paths(hook_type, git_dir=git_dir)
mkdirp(os.path.dirname(hook_path)) mkdirp(os.path.dirname(hook_path))
@ -94,7 +100,7 @@ def _install_hook_script(
'SKIP_ON_MISSING_CONFIG': skip_on_missing_config, 'SKIP_ON_MISSING_CONFIG': skip_on_missing_config,
} }
with io.open(hook_path, 'w') as hook_file: with open(hook_path, 'w') as hook_file:
contents = resource_text('hook-tmpl') contents = resource_text('hook-tmpl')
before, rest = contents.split(TEMPLATE_START) before, rest = contents.split(TEMPLATE_START)
to_template, after = rest.split(TEMPLATE_END) to_template, after = rest.split(TEMPLATE_END)
@ -108,14 +114,18 @@ def _install_hook_script(
hook_file.write(TEMPLATE_END + after) hook_file.write(TEMPLATE_END + after)
make_executable(hook_path) make_executable(hook_path)
output.write_line('pre-commit installed at {}'.format(hook_path)) output.write_line(f'pre-commit installed at {hook_path}')
def install( def install(
config_file, store, hook_types, config_file: str,
overwrite=False, hooks=False, store: Store,
skip_on_missing_config=False, git_dir=None, hook_types: Sequence[str],
): overwrite: bool = False,
hooks: bool = False,
skip_on_missing_config: bool = False,
git_dir: Optional[str] = None,
) -> int:
if git.has_core_hookpaths_set(): if git.has_core_hookpaths_set():
logger.error( logger.error(
'Cowardly refusing to install hooks with `core.hooksPath` set.\n' 'Cowardly refusing to install hooks with `core.hooksPath` set.\n'
@ -137,11 +147,12 @@ def install(
return 0 return 0
def install_hooks(config_file, store): def install_hooks(config_file: str, store: Store) -> int:
install_hook_envs(all_hooks(load_config(config_file), store), store) install_hook_envs(all_hooks(load_config(config_file), store), store)
return 0
def _uninstall_hook_script(hook_type): # type: (str) -> None def _uninstall_hook_script(hook_type: str) -> None:
hook_path, legacy_path = _hook_paths(hook_type) hook_path, legacy_path = _hook_paths(hook_type)
# If our file doesn't exist or it isn't ours, gtfo. # If our file doesn't exist or it isn't ours, gtfo.
@ -149,14 +160,14 @@ def _uninstall_hook_script(hook_type): # type: (str) -> None
return return
os.remove(hook_path) os.remove(hook_path)
output.write_line('{} uninstalled'.format(hook_type)) output.write_line(f'{hook_type} uninstalled')
if os.path.exists(legacy_path): if os.path.exists(legacy_path):
os.rename(legacy_path, hook_path) os.rename(legacy_path, hook_path)
output.write_line('Restored previous hooks to {}'.format(hook_path)) output.write_line(f'Restored previous hooks to {hook_path}')
def uninstall(hook_types): def uninstall(hook_types: Sequence[str]) -> int:
for hook_type in hook_types: for hook_type in hook_types:
_uninstall_hook_script(hook_type) _uninstall_hook_script(hook_type)
return 0 return 0

@ -1,23 +1,19 @@
from __future__ import print_function
from __future__ import unicode_literals
import io
import re import re
import yaml import yaml
from aspy.yaml import ordered_load from aspy.yaml import ordered_load
def _indent(s): def _indent(s: str) -> str:
lines = s.splitlines(True) lines = s.splitlines(True)
return ''.join(' ' * 4 + line if line.strip() else line for line in lines) return ''.join(' ' * 4 + line if line.strip() else line for line in lines)
def _is_header_line(line): def _is_header_line(line: str) -> bool:
return (line.startswith(('#', '---')) or not line.strip()) return line.startswith(('#', '---')) or not line.strip()
def _migrate_map(contents): def _migrate_map(contents: str) -> str:
# Find the first non-header line # Find the first non-header line
lines = contents.splitlines(True) lines = contents.splitlines(True)
i = 0 i = 0
@ -41,22 +37,23 @@ def _migrate_map(contents):
return contents return contents
def _migrate_sha_to_rev(contents): def _migrate_sha_to_rev(contents: str) -> str:
reg = re.compile(r'(\n\s+)sha:') reg = re.compile(r'(\n\s+)sha:')
return reg.sub(r'\1rev:', contents) return reg.sub(r'\1rev:', contents)
def migrate_config(config_file, quiet=False): def migrate_config(config_file: str, quiet: bool = False) -> int:
with io.open(config_file) as f: with open(config_file) as f:
orig_contents = contents = f.read() orig_contents = contents = f.read()
contents = _migrate_map(contents) contents = _migrate_map(contents)
contents = _migrate_sha_to_rev(contents) contents = _migrate_sha_to_rev(contents)
if contents != orig_contents: if contents != orig_contents:
with io.open(config_file, 'w') as f: with open(config_file, 'w') as f:
f.write(contents) f.write(contents)
print('Configuration has been migrated.') print('Configuration has been migrated.')
elif not quiet: elif not quiet:
print('Configuration is already migrated.') print('Configuration is already migrated.')
return 0

@ -1,10 +1,17 @@
from __future__ import unicode_literals import argparse
import functools
import logging import logging
import os import os
import re import re
import subprocess import subprocess
import time import time
from typing import Any
from typing import Collection
from typing import Dict
from typing import List
from typing import Sequence
from typing import Set
from typing import Tuple
from identify.identify import tags_from_path from identify.identify import tags_from_path
@ -14,16 +21,23 @@ from pre_commit import output
from pre_commit.clientlib import load_config from pre_commit.clientlib import load_config
from pre_commit.output import get_hook_message from pre_commit.output import get_hook_message
from pre_commit.repository import all_hooks from pre_commit.repository import all_hooks
from pre_commit.repository import Hook
from pre_commit.repository import install_hook_envs from pre_commit.repository import install_hook_envs
from pre_commit.staged_files_only import staged_files_only from pre_commit.staged_files_only import staged_files_only
from pre_commit.store import Store
from pre_commit.util import cmd_output_b from pre_commit.util import cmd_output_b
from pre_commit.util import EnvironT
from pre_commit.util import noop_context from pre_commit.util import noop_context
logger = logging.getLogger('pre_commit') logger = logging.getLogger('pre_commit')
def filter_by_include_exclude(names, include, exclude): def filter_by_include_exclude(
names: Collection[str],
include: str,
exclude: str,
) -> List[str]:
include_re, exclude_re = re.compile(include), re.compile(exclude) include_re, exclude_re = re.compile(include), re.compile(exclude)
return [ return [
filename for filename in names filename for filename in names
@ -32,25 +46,26 @@ def filter_by_include_exclude(names, include, exclude):
] ]
class Classifier(object): class Classifier:
def __init__(self, filenames): def __init__(self, filenames: Sequence[str]) -> None:
# on windows we normalize all filenames to use forward slashes # on windows we normalize all filenames to use forward slashes
# this makes it easier to filter using the `files:` regex # this makes it easier to filter using the `files:` regex
# this also makes improperly quoted shell-based hooks work better # this also makes improperly quoted shell-based hooks work better
# see #1173 # see #1173
if os.altsep == '/' and os.sep == '\\': if os.altsep == '/' and os.sep == '\\':
filenames = (f.replace(os.sep, os.altsep) for f in filenames) filenames = [f.replace(os.sep, os.altsep) for f in filenames]
self.filenames = [f for f in filenames if os.path.lexists(f)] self.filenames = [f for f in filenames if os.path.lexists(f)]
self._types_cache = {}
def _types_for_file(self, filename): @functools.lru_cache(maxsize=None)
try: def _types_for_file(self, filename: str) -> Set[str]:
return self._types_cache[filename] return tags_from_path(filename)
except KeyError:
ret = self._types_cache[filename] = tags_from_path(filename)
return ret
def by_types(self, names, types, exclude_types): def by_types(
self,
names: Sequence[str],
types: Collection[str],
exclude_types: Collection[str],
) -> List[str]:
types, exclude_types = frozenset(types), frozenset(exclude_types) types, exclude_types = frozenset(types), frozenset(exclude_types)
ret = [] ret = []
for filename in names: for filename in names:
@ -59,14 +74,14 @@ class Classifier(object):
ret.append(filename) ret.append(filename)
return ret return ret
def filenames_for_hook(self, hook): def filenames_for_hook(self, hook: Hook) -> Tuple[str, ...]:
names = self.filenames names = self.filenames
names = filter_by_include_exclude(names, hook.files, hook.exclude) names = filter_by_include_exclude(names, hook.files, hook.exclude)
names = self.by_types(names, hook.types, hook.exclude_types) names = self.by_types(names, hook.types, hook.exclude_types)
return names return tuple(names)
def _get_skips(environ): def _get_skips(environ: EnvironT) -> Set[str]:
skips = environ.get('SKIP', '') skips = environ.get('SKIP', '')
return {skip.strip() for skip in skips.split(',') if skip.strip()} return {skip.strip() for skip in skips.split(',') if skip.strip()}
@ -75,11 +90,18 @@ SKIPPED = 'Skipped'
NO_FILES = '(no files to check)' NO_FILES = '(no files to check)'
def _subtle_line(s, use_color): def _subtle_line(s: str, use_color: bool) -> None:
output.write_line(color.format_color(s, color.SUBTLE, use_color)) output.write_line(color.format_color(s, color.SUBTLE, use_color))
def _run_single_hook(classifier, hook, skips, cols, verbose, use_color): def _run_single_hook(
classifier: Classifier,
hook: Hook,
skips: Set[str],
cols: int,
verbose: bool,
use_color: bool,
) -> bool:
filenames = classifier.filenames_for_hook(hook) filenames = classifier.filenames_for_hook(hook)
if hook.id in skips or hook.alias in skips: if hook.id in skips or hook.alias in skips:
@ -117,7 +139,8 @@ def _run_single_hook(classifier, hook, skips, cols, verbose, use_color):
diff_cmd = ('git', 'diff', '--no-ext-diff') diff_cmd = ('git', 'diff', '--no-ext-diff')
diff_before = cmd_output_b(*diff_cmd, retcode=None) diff_before = cmd_output_b(*diff_cmd, retcode=None)
filenames = tuple(filenames) if hook.pass_filenames else () if not hook.pass_filenames:
filenames = ()
time_before = time.time() time_before = time.time()
retcode, out = hook.run(filenames, use_color) retcode, out = hook.run(filenames, use_color)
duration = round(time.time() - time_before, 2) or 0 duration = round(time.time() - time_before, 2) or 0
@ -136,13 +159,13 @@ def _run_single_hook(classifier, hook, skips, cols, verbose, use_color):
output.write_line(color.format_color(status, print_color, use_color)) output.write_line(color.format_color(status, print_color, use_color))
if verbose or hook.verbose or retcode or files_modified: if verbose or hook.verbose or retcode or files_modified:
_subtle_line('- hook id: {}'.format(hook.id), use_color) _subtle_line(f'- hook id: {hook.id}', use_color)
if (verbose or hook.verbose) and duration is not None: if (verbose or hook.verbose) and duration is not None:
_subtle_line('- duration: {}s'.format(duration), use_color) _subtle_line(f'- duration: {duration}s', use_color)
if retcode: if retcode:
_subtle_line('- exit code: {}'.format(retcode), use_color) _subtle_line(f'- exit code: {retcode}', use_color)
# Print a message if failing due to file modifications # Print a message if failing due to file modifications
if files_modified: if files_modified:
@ -156,7 +179,7 @@ def _run_single_hook(classifier, hook, skips, cols, verbose, use_color):
return files_modified or bool(retcode) return files_modified or bool(retcode)
def _compute_cols(hooks): def _compute_cols(hooks: Sequence[Hook]) -> int:
"""Compute the number of columns to display hook messages. The widest """Compute the number of columns to display hook messages. The widest
that will be displayed is in the no files skipped case: that will be displayed is in the no files skipped case:
@ -171,7 +194,7 @@ def _compute_cols(hooks):
return max(cols, 80) return max(cols, 80)
def _all_filenames(args): def _all_filenames(args: argparse.Namespace) -> Collection[str]:
if args.origin and args.source: if args.origin and args.source:
return git.get_changed_files(args.origin, args.source) return git.get_changed_files(args.origin, args.source)
elif args.hook_stage in {'prepare-commit-msg', 'commit-msg'}: elif args.hook_stage in {'prepare-commit-msg', 'commit-msg'}:
@ -186,7 +209,12 @@ def _all_filenames(args):
return git.get_staged_files() return git.get_staged_files()
def _run_hooks(config, hooks, args, environ): def _run_hooks(
config: Dict[str, Any],
hooks: Sequence[Hook],
args: argparse.Namespace,
environ: EnvironT,
) -> int:
"""Actually run the hooks.""" """Actually run the hooks."""
skips = _get_skips(environ) skips = _get_skips(environ)
cols = _compute_cols(hooks) cols = _compute_cols(hooks)
@ -223,12 +251,12 @@ def _run_hooks(config, hooks, args, environ):
return retval return retval
def _has_unmerged_paths(): def _has_unmerged_paths() -> bool:
_, stdout, _ = cmd_output_b('git', 'ls-files', '--unmerged') _, stdout, _ = cmd_output_b('git', 'ls-files', '--unmerged')
return bool(stdout.strip()) return bool(stdout.strip())
def _has_unstaged_config(config_file): def _has_unstaged_config(config_file: str) -> bool:
retcode, _, _ = cmd_output_b( retcode, _, _ = cmd_output_b(
'git', 'diff', '--no-ext-diff', '--exit-code', config_file, 'git', 'diff', '--no-ext-diff', '--exit-code', config_file,
retcode=None, retcode=None,
@ -237,7 +265,12 @@ def _has_unstaged_config(config_file):
return retcode == 1 return retcode == 1
def run(config_file, store, args, environ=os.environ): def run(
config_file: str,
store: Store,
args: argparse.Namespace,
environ: EnvironT = os.environ,
) -> int:
no_stash = args.all_files or bool(args.files) no_stash = args.all_files or bool(args.files)
# Check if we have unresolved merge conflict files and fail fast. # Check if we have unresolved merge conflict files and fail fast.
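
`Classifier._types_for_file` above drops its hand-rolled `_types_cache` dict in favour of `functools.lru_cache` on the method. A minimal sketch of that caching pattern, with a made-up class and a fake expensive probe standing in for `tags_from_path`:

import functools

class FileTypes:
    @functools.lru_cache(maxsize=None)
    def types_for_file(self, filename: str) -> frozenset:
        print(f'expensive probe of {filename}')
        return frozenset({'file', 'text'})

ft = FileTypes()
ft.types_for_file('setup.py')
ft.types_for_file('setup.py')   # served from the cache; the probe runs once
# Note the cache key includes `self`, which is acceptable for short-lived objects.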

@ -1,8 +1,3 @@
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
# TODO: maybe `git ls-remote git://github.com/pre-commit/pre-commit-hooks` to # TODO: maybe `git ls-remote git://github.com/pre-commit/pre-commit-hooks` to
# determine the latest revision? This adds ~200ms from my tests (and is # determine the latest revision? This adds ~200ms from my tests (and is
# significantly faster than https:// or http://). For now, periodically # significantly faster than https:// or http://). For now, periodically
@ -21,6 +16,6 @@ repos:
''' '''
def sample_config(): def sample_config() -> int:
print(SAMPLE_CONFIG, end='') print(SAMPLE_CONFIG, end='')
return 0 return 0

@ -1,9 +1,8 @@
from __future__ import absolute_import import argparse
from __future__ import unicode_literals
import collections import collections
import logging import logging
import os.path import os.path
from typing import Tuple
from aspy.yaml import ordered_dump from aspy.yaml import ordered_dump
@ -20,7 +19,7 @@ from pre_commit.xargs import xargs
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def _repo_ref(tmpdir, repo, ref): def _repo_ref(tmpdir: str, repo: str, ref: str) -> Tuple[str, str]:
# if `ref` is explicitly passed, use it # if `ref` is explicitly passed, use it
if ref: if ref:
return repo, ref return repo, ref
@ -50,7 +49,7 @@ def _repo_ref(tmpdir, repo, ref):
return repo, ref return repo, ref
def try_repo(args): def try_repo(args: argparse.Namespace) -> int:
with tmpdir() as tempdir: with tmpdir() as tempdir:
repo, ref = _repo_ref(tempdir, args.repo, args.ref) repo, ref = _repo_ref(tempdir, args.repo, args.ref)

@ -1,6 +1,3 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import sys import sys
if sys.version_info < (3, 8): # pragma: no cover (<PY38) if sys.version_info < (3, 8): # pragma: no cover (<PY38)

@ -1,19 +1,33 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import collections
import contextlib import contextlib
import enum
import os import os
from typing import Generator
from typing import NamedTuple
from typing import Optional
from typing import Tuple
from typing import Union
from pre_commit.util import EnvironT
UNSET = collections.namedtuple('UNSET', ())() class _Unset(enum.Enum):
UNSET = 1
Var = collections.namedtuple('Var', ('name', 'default')) UNSET = _Unset.UNSET
Var.__new__.__defaults__ = ('',)
def format_env(parts, env): class Var(NamedTuple):
name: str
default: str = ''
SubstitutionT = Tuple[Union[str, Var], ...]
ValueT = Union[str, _Unset, SubstitutionT]
PatchesT = Tuple[Tuple[str, ValueT], ...]
def format_env(parts: SubstitutionT, env: EnvironT) -> str:
return ''.join( return ''.join(
env.get(part.name, part.default) if isinstance(part, Var) else part env.get(part.name, part.default) if isinstance(part, Var) else part
for part in parts for part in parts
@ -21,7 +35,10 @@ def format_env(parts, env):
@contextlib.contextmanager @contextlib.contextmanager
def envcontext(patch, _env=None): def envcontext(
patch: PatchesT,
_env: Optional[EnvironT] = None,
) -> Generator[None, None, None]:
"""In this context, `os.environ` is modified according to `patch`. """In this context, `os.environ` is modified according to `patch`.
`patch` is an iterable of 2-tuples (key, value): `patch` is an iterable of 2-tuples (key, value):
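
The `envcontext` hunk above swaps two Python 2 idioms for typed ones: the empty-namedtuple `UNSET` sentinel becomes a one-member `enum.Enum`, and `Var.__new__.__defaults__` becomes a class-level default. The same pattern pulled out on its own (the patch values are illustrative):

import enum
from typing import NamedTuple

class _Unset(enum.Enum):
    UNSET = 1

UNSET = _Unset.UNSET            # a sentinel mypy can tell apart from str

class Var(NamedTuple):
    name: str
    default: str = ''           # replaces Var.__new__.__defaults__ = ('',)

patch = (
    ('PRE_COMMIT', '1'),                          # set
    ('GIT_WORK_TREE', UNSET),                     # unset
    ('PATH', (Var('PATH'), ':/opt/extra/bin')),   # substitution, per SubstitutionT
)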

@ -1,13 +1,9 @@
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import contextlib import contextlib
import os.path import os.path
import sys import sys
import traceback import traceback
from typing import Generator
import six from typing import Union
import pre_commit.constants as C import pre_commit.constants as C
from pre_commit import five from pre_commit import five
@ -19,14 +15,11 @@ class FatalError(RuntimeError):
pass pass
def _to_bytes(exc): def _to_bytes(exc: BaseException) -> bytes:
try: return str(exc).encode('UTF-8')
return bytes(exc)
except Exception:
return six.text_type(exc).encode('UTF-8')
def _log_and_exit(msg, exc, formatted): def _log_and_exit(msg: str, exc: BaseException, formatted: str) -> None:
error_msg = b''.join(( error_msg = b''.join((
five.to_bytes(msg), b': ', five.to_bytes(msg), b': ',
five.to_bytes(type(exc).__name__), b': ', five.to_bytes(type(exc).__name__), b': ',
@ -35,22 +28,22 @@ def _log_and_exit(msg, exc, formatted):
output.write_line(error_msg) output.write_line(error_msg)
store = Store() store = Store()
log_path = os.path.join(store.directory, 'pre-commit.log') log_path = os.path.join(store.directory, 'pre-commit.log')
output.write_line('Check the log at {}'.format(log_path)) output.write_line(f'Check the log at {log_path}')
with open(log_path, 'wb') as log: with open(log_path, 'wb') as log:
def _log_line(*s): # type: (*str) -> None def _log_line(s: Union[None, str, bytes] = None) -> None:
output.write_line(*s, stream=log) output.write_line(s, stream=log)
_log_line('### version information') _log_line('### version information')
_log_line() _log_line()
_log_line('```') _log_line('```')
_log_line('pre-commit version: {}'.format(C.VERSION)) _log_line(f'pre-commit version: {C.VERSION}')
_log_line('sys.version:') _log_line('sys.version:')
for line in sys.version.splitlines(): for line in sys.version.splitlines():
_log_line(' {}'.format(line)) _log_line(f' {line}')
_log_line('sys.executable: {}'.format(sys.executable)) _log_line(f'sys.executable: {sys.executable}')
_log_line('os.name: {}'.format(os.name)) _log_line(f'os.name: {os.name}')
_log_line('sys.platform: {}'.format(sys.platform)) _log_line(f'sys.platform: {sys.platform}')
_log_line('```') _log_line('```')
_log_line() _log_line()
@ -67,7 +60,7 @@ def _log_and_exit(msg, exc, formatted):
@contextlib.contextmanager @contextlib.contextmanager
def error_handler(): def error_handler() -> Generator[None, None, None]:
try: try:
yield yield
except (Exception, KeyboardInterrupt) as e: except (Exception, KeyboardInterrupt) as e:

@ -1,11 +1,11 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import contextlib import contextlib
import errno import errno
import os
from typing import Callable
from typing import Generator
try: # pragma: no cover (windows) if os.name == 'nt': # pragma: no cover (windows)
import msvcrt import msvcrt
# https://docs.microsoft.com/en-us/cpp/c-runtime-library/reference/locking # https://docs.microsoft.com/en-us/cpp/c-runtime-library/reference/locking
@ -15,15 +15,20 @@ try: # pragma: no cover (windows)
_region = 0xffff _region = 0xffff
@contextlib.contextmanager @contextlib.contextmanager
def _locked(fileno, blocked_cb): def _locked(
fileno: int,
blocked_cb: Callable[[], None],
) -> Generator[None, None, None]:
try: try:
msvcrt.locking(fileno, msvcrt.LK_NBLCK, _region) # TODO: https://github.com/python/typeshed/pull/3607
except IOError: msvcrt.locking(fileno, msvcrt.LK_NBLCK, _region) # type: ignore
except OSError:
blocked_cb() blocked_cb()
while True: while True:
try: try:
msvcrt.locking(fileno, msvcrt.LK_LOCK, _region) # TODO: https://github.com/python/typeshed/pull/3607
except IOError as e: msvcrt.locking(fileno, msvcrt.LK_LOCK, _region) # type: ignore # noqa: E501
except OSError as e:
# Locking violation. Returned when the _LK_LOCK or _LK_RLCK # Locking violation. Returned when the _LK_LOCK or _LK_RLCK
# flag is specified and the file cannot be locked after 10 # flag is specified and the file cannot be locked after 10
# attempts. # attempts.
@ -40,15 +45,19 @@ try: # pragma: no cover (windows)
# The documentation however states: # The documentation however states:
# "Regions should be locked only briefly and should be unlocked # "Regions should be locked only briefly and should be unlocked
# before closing a file or exiting the program." # before closing a file or exiting the program."
msvcrt.locking(fileno, msvcrt.LK_UNLCK, _region) # TODO: https://github.com/python/typeshed/pull/3607
except ImportError: # pragma: windows no cover msvcrt.locking(fileno, msvcrt.LK_UNLCK, _region) # type: ignore
else: # pragma: windows no cover
import fcntl import fcntl
@contextlib.contextmanager @contextlib.contextmanager
def _locked(fileno, blocked_cb): def _locked(
fileno: int,
blocked_cb: Callable[[], None],
) -> Generator[None, None, None]:
try: try:
fcntl.flock(fileno, fcntl.LOCK_EX | fcntl.LOCK_NB) fcntl.flock(fileno, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError: # pragma: no cover (tests are single-threaded) except OSError: # pragma: no cover (tests are single-threaded)
blocked_cb() blocked_cb()
fcntl.flock(fileno, fcntl.LOCK_EX) fcntl.flock(fileno, fcntl.LOCK_EX)
try: try:
@ -58,7 +67,10 @@ except ImportError: # pragma: windows no cover
@contextlib.contextmanager @contextlib.contextmanager
def lock(path, blocked_cb): def lock(
path: str,
blocked_cb: Callable[[], None],
) -> Generator[None, None, None]:
with open(path, 'a+') as f: with open(path, 'a+') as f:
with _locked(f.fileno(), blocked_cb): with _locked(f.fileno(), blocked_cb):
yield yield

@ -1,15 +1,12 @@
from __future__ import absolute_import from typing import Union
from __future__ import unicode_literals
import six
def to_text(s): def to_text(s: Union[str, bytes]) -> str:
return s if isinstance(s, six.text_type) else s.decode('UTF-8') return s if isinstance(s, str) else s.decode('UTF-8')
def to_bytes(s): def to_bytes(s: Union[str, bytes]) -> bytes:
return s if isinstance(s, bytes) else s.encode('UTF-8') return s if isinstance(s, bytes) else s.encode('UTF-8')
n = to_bytes if six.PY2 else to_text n = to_text

@ -1,17 +1,20 @@
from __future__ import unicode_literals
import logging import logging
import os.path import os.path
import sys import sys
from typing import Dict
from typing import List
from typing import Optional
from typing import Set
from pre_commit.util import cmd_output from pre_commit.util import cmd_output
from pre_commit.util import cmd_output_b from pre_commit.util import cmd_output_b
from pre_commit.util import EnvironT
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def zsplit(s): def zsplit(s: str) -> List[str]:
s = s.strip('\0') s = s.strip('\0')
if s: if s:
return s.split('\0') return s.split('\0')
@ -19,7 +22,7 @@ def zsplit(s):
return [] return []
def no_git_env(_env=None): def no_git_env(_env: Optional[EnvironT] = None) -> Dict[str, str]:
# Too many bugs dealing with environment variables and GIT: # Too many bugs dealing with environment variables and GIT:
# https://github.com/pre-commit/pre-commit/issues/300 # https://github.com/pre-commit/pre-commit/issues/300
# In git 2.6.3 (maybe others), git exports GIT_WORK_TREE while running # In git 2.6.3 (maybe others), git exports GIT_WORK_TREE while running
@ -36,11 +39,11 @@ def no_git_env(_env=None):
} }
def get_root(): def get_root() -> str:
return cmd_output('git', 'rev-parse', '--show-toplevel')[1].strip() return cmd_output('git', 'rev-parse', '--show-toplevel')[1].strip()
def get_git_dir(git_root='.'): def get_git_dir(git_root: str = '.') -> str:
opts = ('--git-common-dir', '--git-dir') opts = ('--git-common-dir', '--git-dir')
_, out, _ = cmd_output('git', 'rev-parse', *opts, cwd=git_root) _, out, _ = cmd_output('git', 'rev-parse', *opts, cwd=git_root)
for line, opt in zip(out.splitlines(), opts): for line, opt in zip(out.splitlines(), opts):
@ -50,12 +53,12 @@ def get_git_dir(git_root='.'):
raise AssertionError('unreachable: no git dir') raise AssertionError('unreachable: no git dir')
def get_remote_url(git_root): def get_remote_url(git_root: str) -> str:
_, out, _ = cmd_output('git', 'config', 'remote.origin.url', cwd=git_root) _, out, _ = cmd_output('git', 'config', 'remote.origin.url', cwd=git_root)
return out.strip() return out.strip()
def is_in_merge_conflict(): def is_in_merge_conflict() -> bool:
git_dir = get_git_dir('.') git_dir = get_git_dir('.')
return ( return (
os.path.exists(os.path.join(git_dir, 'MERGE_MSG')) and os.path.exists(os.path.join(git_dir, 'MERGE_MSG')) and
@ -63,7 +66,7 @@ def is_in_merge_conflict():
) )
def parse_merge_msg_for_conflicts(merge_msg): def parse_merge_msg_for_conflicts(merge_msg: bytes) -> List[str]:
# Conflicted files start with tabs # Conflicted files start with tabs
return [ return [
line.lstrip(b'#').strip().decode('UTF-8') line.lstrip(b'#').strip().decode('UTF-8')
@ -73,7 +76,7 @@ def parse_merge_msg_for_conflicts(merge_msg):
] ]
def get_conflicted_files(): def get_conflicted_files() -> Set[str]:
logger.info('Checking merge-conflict files only.') logger.info('Checking merge-conflict files only.')
# Need to get the conflicted files from the MERGE_MSG because they could # Need to get the conflicted files from the MERGE_MSG because they could
# have resolved the conflict by choosing one side or the other # have resolved the conflict by choosing one side or the other
@ -94,7 +97,7 @@ def get_conflicted_files():
return set(merge_conflict_filenames) | set(merge_diff_filenames) return set(merge_conflict_filenames) | set(merge_diff_filenames)
def get_staged_files(cwd=None): def get_staged_files(cwd: Optional[str] = None) -> List[str]:
return zsplit( return zsplit(
cmd_output( cmd_output(
'git', 'diff', '--staged', '--name-only', '--no-ext-diff', '-z', 'git', 'diff', '--staged', '--name-only', '--no-ext-diff', '-z',
@ -105,7 +108,7 @@ def get_staged_files(cwd=None):
) )
def intent_to_add_files(): def intent_to_add_files() -> List[str]:
_, stdout, _ = cmd_output('git', 'status', '--porcelain', '-z') _, stdout, _ = cmd_output('git', 'status', '--porcelain', '-z')
parts = list(reversed(zsplit(stdout))) parts = list(reversed(zsplit(stdout)))
intent_to_add = [] intent_to_add = []
@ -119,37 +122,35 @@ def intent_to_add_files():
return intent_to_add return intent_to_add
def get_all_files(): def get_all_files() -> List[str]:
return zsplit(cmd_output('git', 'ls-files', '-z')[1]) return zsplit(cmd_output('git', 'ls-files', '-z')[1])
def get_changed_files(new, old): def get_changed_files(new: str, old: str) -> List[str]:
return zsplit( return zsplit(
cmd_output( cmd_output(
'git', 'diff', '--name-only', '--no-ext-diff', '-z', 'git', 'diff', '--name-only', '--no-ext-diff', '-z',
'{}...{}'.format(old, new), f'{old}...{new}',
)[1], )[1],
) )
def head_rev(remote): def head_rev(remote: str) -> str:
_, out, _ = cmd_output('git', 'ls-remote', '--exit-code', remote, 'HEAD') _, out, _ = cmd_output('git', 'ls-remote', '--exit-code', remote, 'HEAD')
return out.split()[0] return out.split()[0]
def has_diff(*args, **kwargs): def has_diff(*args: str, repo: str = '.') -> bool:
repo = kwargs.pop('repo', '.')
assert not kwargs, kwargs
cmd = ('git', 'diff', '--quiet', '--no-ext-diff') + args cmd = ('git', 'diff', '--quiet', '--no-ext-diff') + args
return cmd_output_b(*cmd, cwd=repo, retcode=None)[0] == 1 return cmd_output_b(*cmd, cwd=repo, retcode=None)[0] == 1
def has_core_hookpaths_set(): def has_core_hookpaths_set() -> bool:
_, out, _ = cmd_output_b('git', 'config', 'core.hooksPath', retcode=None) _, out, _ = cmd_output_b('git', 'config', 'core.hooksPath', retcode=None)
return bool(out.strip()) return bool(out.strip())
def init_repo(path, remote): def init_repo(path: str, remote: str) -> None:
if os.path.isdir(remote): if os.path.isdir(remote):
remote = os.path.abspath(remote) remote = os.path.abspath(remote)
@ -158,7 +159,7 @@ def init_repo(path, remote):
cmd_output_b('git', 'remote', 'add', 'origin', remote, cwd=path, env=env) cmd_output_b('git', 'remote', 'add', 'origin', remote, cwd=path, env=env)
def commit(repo='.'): def commit(repo: str = '.') -> None:
env = no_git_env() env = no_git_env()
name, email = 'pre-commit', 'asottile+pre-commit@umich.edu' name, email = 'pre-commit', 'asottile+pre-commit@umich.edu'
env['GIT_AUTHOR_NAME'] = env['GIT_COMMITTER_NAME'] = name env['GIT_AUTHOR_NAME'] = env['GIT_COMMITTER_NAME'] = name
@ -167,12 +168,12 @@ def commit(repo='.'):
cmd_output_b(*cmd, cwd=repo, env=env) cmd_output_b(*cmd, cwd=repo, env=env)
def git_path(name, repo='.'): def git_path(name: str, repo: str = '.') -> str:
_, out, _ = cmd_output('git', 'rev-parse', '--git-path', name, cwd=repo) _, out, _ = cmd_output('git', 'rev-parse', '--git-path', name, cwd=repo)
return os.path.join(repo, out.strip()) return os.path.join(repo, out.strip())
def check_for_cygwin_mismatch(): def check_for_cygwin_mismatch() -> None:
"""See https://github.com/pre-commit/pre-commit/issues/354""" """See https://github.com/pre-commit/pre-commit/issues/354"""
if sys.platform in ('cygwin', 'win32'): # pragma: no cover (windows) if sys.platform in ('cygwin', 'win32'): # pragma: no cover (windows)
is_cygwin_python = sys.platform == 'cygwin' is_cygwin_python = sys.platform == 'cygwin'
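
`has_diff` above also gains a Python-3-only signature: a keyword-only `repo` argument after `*args` replaces the old `kwargs.pop('repo', '.')` / `assert not kwargs` dance, so `repo` can no longer be passed positionally by accident. A small sketch of the calling convention (placeholder body, not the real implementation):

def has_diff(*args: str, repo: str = '.') -> bool:
    print(f'would run git diff in {repo!r} with extra args {args}')
    return False                                         # placeholder only

has_diff('HEAD', 'origin/master', repo='/tmp/clone')     # repo must be a keyword
has_diff('HEAD', 'origin/master', '/tmp/clone')          # just another positional arg now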

@ -1,4 +1,9 @@
from __future__ import unicode_literals from typing import Callable
from typing import NamedTuple
from typing import Optional
from typing import Sequence
from typing import Tuple
from typing import TYPE_CHECKING
from pre_commit.languages import conda from pre_commit.languages import conda
from pre_commit.languages import docker from pre_commit.languages import docker
@ -14,57 +19,43 @@ from pre_commit.languages import rust
from pre_commit.languages import script from pre_commit.languages import script
from pre_commit.languages import swift from pre_commit.languages import swift
from pre_commit.languages import system from pre_commit.languages import system
from pre_commit.prefix import Prefix
# A language implements the following constant and functions in its module: if TYPE_CHECKING:
# from pre_commit.repository import Hook
# # Use None for no environment
# ENVIRONMENT_DIR = 'foo_env'
#
# def get_default_version():
# """Return a value to replace the 'default' value for language_version.
#
# return 'default' if there is no better option.
# """
#
# def healthy(prefix, language_version):
# """Return whether or not the environment is considered functional."""
#
# def install_environment(prefix, version, additional_dependencies):
# """Installs a repository in the given repository. Note that the current
# working directory will already be inside the repository.
#
# Args:
# prefix - `Prefix` bound to the repository.
# version - A version specified in the hook configuration or 'default'.
# """
#
# def run_hook(hook, file_args, color):
# """Runs a hook and returns the returncode and output of running that
# hook.
#
# Args:
# hook - `Hook`
# file_args - The files to be run
# color - whether the hook should be given a pty (when supported)
#
# Returns:
# (returncode, output)
# """
class Language(NamedTuple):
name: str
# Use `None` for no installation / environment
ENVIRONMENT_DIR: Optional[str]
# return a value to replace `'default` for `language_version`
get_default_version: Callable[[], str]
# return whether the environment is healthy (or should be rebuilt)
healthy: Callable[[Prefix, str], bool]
# install a repository for the given language and language_version
install_environment: Callable[[Prefix, str, Sequence[str]], None]
# execute a hook and return the exit code and output
run_hook: 'Callable[[Hook, Sequence[str], bool], Tuple[int, bytes]]'
# TODO: back to modules + Protocol: https://github.com/python/mypy/issues/5018
languages = { languages = {
'conda': conda, # BEGIN GENERATED (testing/gen-languages-all)
'docker': docker, 'conda': Language(name='conda', ENVIRONMENT_DIR=conda.ENVIRONMENT_DIR, get_default_version=conda.get_default_version, healthy=conda.healthy, install_environment=conda.install_environment, run_hook=conda.run_hook), # noqa: E501
'docker_image': docker_image, 'docker': Language(name='docker', ENVIRONMENT_DIR=docker.ENVIRONMENT_DIR, get_default_version=docker.get_default_version, healthy=docker.healthy, install_environment=docker.install_environment, run_hook=docker.run_hook), # noqa: E501
'fail': fail, 'docker_image': Language(name='docker_image', ENVIRONMENT_DIR=docker_image.ENVIRONMENT_DIR, get_default_version=docker_image.get_default_version, healthy=docker_image.healthy, install_environment=docker_image.install_environment, run_hook=docker_image.run_hook), # noqa: E501
'golang': golang, 'fail': Language(name='fail', ENVIRONMENT_DIR=fail.ENVIRONMENT_DIR, get_default_version=fail.get_default_version, healthy=fail.healthy, install_environment=fail.install_environment, run_hook=fail.run_hook), # noqa: E501
'node': node, 'golang': Language(name='golang', ENVIRONMENT_DIR=golang.ENVIRONMENT_DIR, get_default_version=golang.get_default_version, healthy=golang.healthy, install_environment=golang.install_environment, run_hook=golang.run_hook), # noqa: E501
'pygrep': pygrep, 'node': Language(name='node', ENVIRONMENT_DIR=node.ENVIRONMENT_DIR, get_default_version=node.get_default_version, healthy=node.healthy, install_environment=node.install_environment, run_hook=node.run_hook), # noqa: E501
'python': python, 'pygrep': Language(name='pygrep', ENVIRONMENT_DIR=pygrep.ENVIRONMENT_DIR, get_default_version=pygrep.get_default_version, healthy=pygrep.healthy, install_environment=pygrep.install_environment, run_hook=pygrep.run_hook), # noqa: E501
'python_venv': python_venv, 'python': Language(name='python', ENVIRONMENT_DIR=python.ENVIRONMENT_DIR, get_default_version=python.get_default_version, healthy=python.healthy, install_environment=python.install_environment, run_hook=python.run_hook), # noqa: E501
'ruby': ruby, 'python_venv': Language(name='python_venv', ENVIRONMENT_DIR=python_venv.ENVIRONMENT_DIR, get_default_version=python_venv.get_default_version, healthy=python_venv.healthy, install_environment=python_venv.install_environment, run_hook=python_venv.run_hook), # noqa: E501
'rust': rust, 'ruby': Language(name='ruby', ENVIRONMENT_DIR=ruby.ENVIRONMENT_DIR, get_default_version=ruby.get_default_version, healthy=ruby.healthy, install_environment=ruby.install_environment, run_hook=ruby.run_hook), # noqa: E501
'script': script, 'rust': Language(name='rust', ENVIRONMENT_DIR=rust.ENVIRONMENT_DIR, get_default_version=rust.get_default_version, healthy=rust.healthy, install_environment=rust.install_environment, run_hook=rust.run_hook), # noqa: E501
'swift': swift, 'script': Language(name='script', ENVIRONMENT_DIR=script.ENVIRONMENT_DIR, get_default_version=script.get_default_version, healthy=script.healthy, install_environment=script.install_environment, run_hook=script.run_hook), # noqa: E501
'system': system, 'swift': Language(name='swift', ENVIRONMENT_DIR=swift.ENVIRONMENT_DIR, get_default_version=swift.get_default_version, healthy=swift.healthy, install_environment=swift.install_environment, run_hook=swift.run_hook), # noqa: E501
'system': Language(name='system', ENVIRONMENT_DIR=system.ENVIRONMENT_DIR, get_default_version=system.get_default_version, healthy=system.healthy, install_environment=system.install_environment, run_hook=system.run_hook), # noqa: E501
# END GENERATED
} }
all_languages = sorted(languages) all_languages = sorted(languages)
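The removed comment block above described the per-language module protocol informally; the `Language` NamedTuple now pins it down. A stand-alone sketch of that shape, with a made-up `demo` language rather than anything pre-commit ships:

from typing import Callable
from typing import NamedTuple
from typing import Optional
from typing import Sequence
from typing import Tuple


class LanguageSketch(NamedTuple):
    name: str
    ENVIRONMENT_DIR: Optional[str]          # None => nothing to install
    get_default_version: Callable[[], str]
    healthy: Callable[[str, str], bool]     # (prefix, version), simplified to str
    install_environment: Callable[[str, str, Sequence[str]], None]
    run_hook: Callable[[str, Sequence[str], bool], Tuple[int, bytes]]


def _run_demo_hook(entry: str, files: Sequence[str], color: bool) -> Tuple[int, bytes]:
    return 0, f'{entry}: {len(files)} files\n'.encode()


demo = LanguageSketch(
    name='demo',
    ENVIRONMENT_DIR=None,
    get_default_version=lambda: 'default',
    healthy=lambda prefix, version: True,
    install_environment=lambda prefix, version, deps: None,
    run_hook=_run_demo_hook,
)

print(demo.run_hook('echo', ['a.py', 'b.py'], False))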

View file

@ -1,24 +1,34 @@
import contextlib import contextlib
import os import os
from typing import Generator
from typing import Sequence
from typing import Tuple
from typing import TYPE_CHECKING
from pre_commit.envcontext import envcontext from pre_commit.envcontext import envcontext
from pre_commit.envcontext import PatchesT
from pre_commit.envcontext import SubstitutionT
from pre_commit.envcontext import UNSET from pre_commit.envcontext import UNSET
from pre_commit.envcontext import Var from pre_commit.envcontext import Var
from pre_commit.languages import helpers from pre_commit.languages import helpers
from pre_commit.prefix import Prefix
from pre_commit.util import clean_path_on_failure from pre_commit.util import clean_path_on_failure
from pre_commit.util import cmd_output_b from pre_commit.util import cmd_output_b
if TYPE_CHECKING:
from pre_commit.repository import Hook
ENVIRONMENT_DIR = 'conda' ENVIRONMENT_DIR = 'conda'
get_default_version = helpers.basic_get_default_version get_default_version = helpers.basic_get_default_version
healthy = helpers.basic_healthy healthy = helpers.basic_healthy
def get_env_patch(env): def get_env_patch(env: str) -> PatchesT:
# On non-windows systems executables live in $CONDA_PREFIX/bin, on Windows # On non-windows systems executables live in $CONDA_PREFIX/bin, on Windows
# they can be in $CONDA_PREFIX/bin, $CONDA_PREFIX/Library/bin, # they can be in $CONDA_PREFIX/bin, $CONDA_PREFIX/Library/bin,
# $CONDA_PREFIX/Scripts and $CONDA_PREFIX. Whereas the latter only # $CONDA_PREFIX/Scripts and $CONDA_PREFIX. Whereas the latter only
# seems to be used for python.exe. # seems to be used for python.exe.
path = (os.path.join(env, 'bin'), os.pathsep, Var('PATH')) path: SubstitutionT = (os.path.join(env, 'bin'), os.pathsep, Var('PATH'))
if os.name == 'nt': # pragma: no cover (platform specific) if os.name == 'nt': # pragma: no cover (platform specific)
path = (env, os.pathsep) + path path = (env, os.pathsep) + path
path = (os.path.join(env, 'Scripts'), os.pathsep) + path path = (os.path.join(env, 'Scripts'), os.pathsep) + path
@ -33,14 +43,21 @@ def get_env_patch(env):
@contextlib.contextmanager @contextlib.contextmanager
def in_env(prefix, language_version): def in_env(
prefix: Prefix,
language_version: str,
) -> Generator[None, None, None]:
directory = helpers.environment_dir(ENVIRONMENT_DIR, language_version) directory = helpers.environment_dir(ENVIRONMENT_DIR, language_version)
envdir = prefix.path(directory) envdir = prefix.path(directory)
with envcontext(get_env_patch(envdir)): with envcontext(get_env_patch(envdir)):
yield yield
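A rough illustration of the PATH layering that get_env_patch builds from the locations noted above, using plain os string handling instead of pre-commit's envcontext machinery (the env path below is hypothetical):

import os


def conda_path_prefix(env: str) -> str:
    parts = [os.path.join(env, 'bin')]
    if os.name == 'nt':  # the extra Windows locations noted in the comment
        parts = [os.path.join(env, 'Scripts'), env] + parts
    return os.pathsep.join(parts)


# the resulting value is prepended to the existing $PATH:
print(conda_path_prefix('/tmp/conda-env') + os.pathsep + os.environ.get('PATH', ''))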
def install_environment(prefix, version, additional_dependencies): def install_environment(
prefix: Prefix,
version: str,
additional_dependencies: Sequence[str],
) -> None:
helpers.assert_version_default('conda', version) helpers.assert_version_default('conda', version)
directory = helpers.environment_dir(ENVIRONMENT_DIR, version) directory = helpers.environment_dir(ENVIRONMENT_DIR, version)
@ -53,11 +70,15 @@ def install_environment(prefix, version, additional_dependencies):
if additional_dependencies: if additional_dependencies:
cmd_output_b( cmd_output_b(
'conda', 'install', '-p', env_dir, *additional_dependencies, 'conda', 'install', '-p', env_dir, *additional_dependencies,
cwd=prefix.prefix_dir cwd=prefix.prefix_dir,
) )
def run_hook(hook, file_args, color): def run_hook(
hook: 'Hook',
file_args: Sequence[str],
color: bool,
) -> Tuple[int, bytes]:
# TODO: Some rare commands need to be run using `conda run` but mostly we # TODO: Some rare commands need to be run using `conda run` but mostly we
# can run them without it, which is much quicker and produces a better # can run them without it, which is much quicker and produces a better
# output. # output.

View file

@ -1,16 +1,18 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import hashlib import hashlib
import os import os
from typing import Sequence
from typing import Tuple
from typing import TYPE_CHECKING
import pre_commit.constants as C import pre_commit.constants as C
from pre_commit import five
from pre_commit.languages import helpers from pre_commit.languages import helpers
from pre_commit.prefix import Prefix
from pre_commit.util import CalledProcessError from pre_commit.util import CalledProcessError
from pre_commit.util import clean_path_on_failure from pre_commit.util import clean_path_on_failure
from pre_commit.util import cmd_output_b from pre_commit.util import cmd_output_b
if TYPE_CHECKING:
from pre_commit.repository import Hook
ENVIRONMENT_DIR = 'docker' ENVIRONMENT_DIR = 'docker'
PRE_COMMIT_LABEL = 'PRE_COMMIT' PRE_COMMIT_LABEL = 'PRE_COMMIT'
@ -18,16 +20,16 @@ get_default_version = helpers.basic_get_default_version
healthy = helpers.basic_healthy healthy = helpers.basic_healthy
def md5(s): # pragma: windows no cover def md5(s: str) -> str: # pragma: windows no cover
return hashlib.md5(five.to_bytes(s)).hexdigest() return hashlib.md5(s.encode()).hexdigest()
def docker_tag(prefix): # pragma: windows no cover def docker_tag(prefix: Prefix) -> str: # pragma: windows no cover
md5sum = md5(os.path.basename(prefix.prefix_dir)).lower() md5sum = md5(os.path.basename(prefix.prefix_dir)).lower()
return 'pre-commit-{}'.format(md5sum) return f'pre-commit-{md5sum}'
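For example, the tag is just `pre-commit-` plus the md5 of the checkout directory name (the path below is made up):

import hashlib
import os.path

prefix_dir = '/home/user/.cache/pre-commit/repoabc123'  # made-up path
md5sum = hashlib.md5(os.path.basename(prefix_dir).encode()).hexdigest().lower()
print(f'pre-commit-{md5sum}')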
def docker_is_running(): # pragma: windows no cover def docker_is_running() -> bool: # pragma: windows no cover
try: try:
cmd_output_b('docker', 'ps') cmd_output_b('docker', 'ps')
except CalledProcessError: except CalledProcessError:
@ -36,16 +38,18 @@ def docker_is_running(): # pragma: windows no cover
return True return True
def assert_docker_available(): # pragma: windows no cover def assert_docker_available() -> None: # pragma: windows no cover
assert docker_is_running(), ( assert docker_is_running(), (
'Docker is either not running or not configured in this environment' 'Docker is either not running or not configured in this environment'
) )
def build_docker_image(prefix, **kwargs): # pragma: windows no cover def build_docker_image(
pull = kwargs.pop('pull') prefix: Prefix,
assert not kwargs, kwargs *,
cmd = ( pull: bool,
) -> None: # pragma: windows no cover
cmd: Tuple[str, ...] = (
'docker', 'build', 'docker', 'build',
'--tag', docker_tag(prefix), '--tag', docker_tag(prefix),
'--label', PRE_COMMIT_LABEL, '--label', PRE_COMMIT_LABEL,
@ -58,8 +62,8 @@ def build_docker_image(prefix, **kwargs): # pragma: windows no cover
def install_environment( def install_environment(
prefix, version, additional_dependencies, prefix: Prefix, version: str, additional_dependencies: Sequence[str],
): # pragma: windows no cover ) -> None: # pragma: windows no cover
helpers.assert_version_default('docker', version) helpers.assert_version_default('docker', version)
helpers.assert_no_additional_deps('docker', additional_dependencies) helpers.assert_no_additional_deps('docker', additional_dependencies)
assert_docker_available() assert_docker_available()
@ -75,14 +79,14 @@ def install_environment(
os.mkdir(directory) os.mkdir(directory)
def get_docker_user(): # pragma: windows no cover def get_docker_user() -> str: # pragma: windows no cover
try: try:
return '{}:{}'.format(os.getuid(), os.getgid()) return '{}:{}'.format(os.getuid(), os.getgid())
except AttributeError: except AttributeError:
return '1000:1000' return '1000:1000'
def docker_cmd(): # pragma: windows no cover def docker_cmd() -> Tuple[str, ...]: # pragma: windows no cover
return ( return (
'docker', 'run', 'docker', 'run',
'--rm', '--rm',
@ -95,7 +99,11 @@ def docker_cmd(): # pragma: windows no cover
) )
def run_hook(hook, file_args, color): # pragma: windows no cover def run_hook(
hook: 'Hook',
file_args: Sequence[str],
color: bool,
) -> Tuple[int, bytes]: # pragma: windows no cover
assert_docker_available() assert_docker_available()
# Rebuild the docker image in case it has gone missing, as many people do # Rebuild the docker image in case it has gone missing, as many people do
# automated cleanup of docker images. # automated cleanup of docker images.

View file

@ -1,10 +1,13 @@
from __future__ import absolute_import from typing import Sequence
from __future__ import unicode_literals from typing import Tuple
from typing import TYPE_CHECKING
from pre_commit.languages import helpers from pre_commit.languages import helpers
from pre_commit.languages.docker import assert_docker_available from pre_commit.languages.docker import assert_docker_available
from pre_commit.languages.docker import docker_cmd from pre_commit.languages.docker import docker_cmd
if TYPE_CHECKING:
from pre_commit.repository import Hook
ENVIRONMENT_DIR = None ENVIRONMENT_DIR = None
get_default_version = helpers.basic_get_default_version get_default_version = helpers.basic_get_default_version
@ -12,7 +15,11 @@ healthy = helpers.basic_healthy
install_environment = helpers.no_install install_environment = helpers.no_install
def run_hook(hook, file_args, color): # pragma: windows no cover def run_hook(
hook: 'Hook',
file_args: Sequence[str],
color: bool,
) -> Tuple[int, bytes]: # pragma: windows no cover
assert_docker_available() assert_docker_available()
cmd = docker_cmd() + hook.cmd cmd = docker_cmd() + hook.cmd
return helpers.run_xargs(hook, cmd, file_args, color=color) return helpers.run_xargs(hook, cmd, file_args, color=color)

View file

@ -1,7 +1,11 @@
from __future__ import unicode_literals from typing import Sequence
from typing import Tuple
from typing import TYPE_CHECKING
from pre_commit.languages import helpers from pre_commit.languages import helpers
if TYPE_CHECKING:
from pre_commit.repository import Hook
ENVIRONMENT_DIR = None ENVIRONMENT_DIR = None
get_default_version = helpers.basic_get_default_version get_default_version = helpers.basic_get_default_version
@ -9,7 +13,11 @@ healthy = helpers.basic_healthy
install_environment = helpers.no_install install_environment = helpers.no_install
def run_hook(hook, file_args, color): def run_hook(
hook: 'Hook',
file_args: Sequence[str],
color: bool,
) -> Tuple[int, bytes]:
out = hook.entry.encode('UTF-8') + b'\n\n' out = hook.entry.encode('UTF-8') + b'\n\n'
out += b'\n'.join(f.encode('UTF-8') for f in file_args) + b'\n' out += b'\n'.join(f.encode('UTF-8') for f in file_args) + b'\n'
return 1, out return 1, out

View file

@ -1,33 +1,39 @@
from __future__ import unicode_literals
import contextlib import contextlib
import os.path import os.path
import sys import sys
from typing import Generator
from typing import Sequence
from typing import Tuple
from typing import TYPE_CHECKING
import pre_commit.constants as C import pre_commit.constants as C
from pre_commit import git from pre_commit import git
from pre_commit.envcontext import envcontext from pre_commit.envcontext import envcontext
from pre_commit.envcontext import PatchesT
from pre_commit.envcontext import Var from pre_commit.envcontext import Var
from pre_commit.languages import helpers from pre_commit.languages import helpers
from pre_commit.prefix import Prefix
from pre_commit.util import clean_path_on_failure from pre_commit.util import clean_path_on_failure
from pre_commit.util import cmd_output from pre_commit.util import cmd_output
from pre_commit.util import cmd_output_b from pre_commit.util import cmd_output_b
from pre_commit.util import rmtree from pre_commit.util import rmtree
if TYPE_CHECKING:
from pre_commit.repository import Hook
ENVIRONMENT_DIR = 'golangenv' ENVIRONMENT_DIR = 'golangenv'
get_default_version = helpers.basic_get_default_version get_default_version = helpers.basic_get_default_version
healthy = helpers.basic_healthy healthy = helpers.basic_healthy
def get_env_patch(venv): def get_env_patch(venv: str) -> PatchesT:
return ( return (
('PATH', (os.path.join(venv, 'bin'), os.pathsep, Var('PATH'))), ('PATH', (os.path.join(venv, 'bin'), os.pathsep, Var('PATH'))),
) )
@contextlib.contextmanager @contextlib.contextmanager
def in_env(prefix): def in_env(prefix: Prefix) -> Generator[None, None, None]:
envdir = prefix.path( envdir = prefix.path(
helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT), helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT),
) )
@ -35,7 +41,7 @@ def in_env(prefix):
yield yield
def guess_go_dir(remote_url): def guess_go_dir(remote_url: str) -> str:
if remote_url.endswith('.git'): if remote_url.endswith('.git'):
remote_url = remote_url[:-1 * len('.git')] remote_url = remote_url[:-1 * len('.git')]
looks_like_url = ( looks_like_url = (
@ -51,7 +57,11 @@ def guess_go_dir(remote_url):
return 'unknown_src_dir' return 'unknown_src_dir'
def install_environment(prefix, version, additional_dependencies): def install_environment(
prefix: Prefix,
version: str,
additional_dependencies: Sequence[str],
) -> None:
helpers.assert_version_default('golang', version) helpers.assert_version_default('golang', version)
directory = prefix.path( directory = prefix.path(
helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT), helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT),
@ -81,6 +91,10 @@ def install_environment(prefix, version, additional_dependencies):
rmtree(pkgdir) rmtree(pkgdir)
def run_hook(hook, file_args, color): def run_hook(
hook: 'Hook',
file_args: Sequence[str],
color: bool,
) -> Tuple[int, bytes]:
with in_env(hook.prefix): with in_env(hook.prefix):
return helpers.run_xargs(hook, hook.cmd, file_args, color=color) return helpers.run_xargs(hook, hook.cmd, file_args, color=color)

View file

@ -1,37 +1,54 @@
from __future__ import unicode_literals
import multiprocessing import multiprocessing
import os import os
import random import random
from typing import Any
import six from typing import List
from typing import NoReturn
from typing import Optional
from typing import overload
from typing import Sequence
from typing import Tuple
from typing import TYPE_CHECKING
import pre_commit.constants as C import pre_commit.constants as C
from pre_commit.prefix import Prefix
from pre_commit.util import cmd_output_b from pre_commit.util import cmd_output_b
from pre_commit.xargs import xargs from pre_commit.xargs import xargs
if TYPE_CHECKING:
from pre_commit.repository import Hook
FIXED_RANDOM_SEED = 1542676186 FIXED_RANDOM_SEED = 1542676186
def run_setup_cmd(prefix, cmd): def run_setup_cmd(prefix: Prefix, cmd: Tuple[str, ...]) -> None:
cmd_output_b(*cmd, cwd=prefix.prefix_dir) cmd_output_b(*cmd, cwd=prefix.prefix_dir)
def environment_dir(ENVIRONMENT_DIR, language_version): @overload
if ENVIRONMENT_DIR is None: def environment_dir(d: None, language_version: str) -> None: ...
@overload
def environment_dir(d: str, language_version: str) -> str: ...
def environment_dir(d: Optional[str], language_version: str) -> Optional[str]:
if d is None:
return None return None
else: else:
return '{}-{}'.format(ENVIRONMENT_DIR, language_version) return f'{d}-{language_version}'
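The @overload pair lets mypy track that a None directory stays None; a stand-alone copy of the pattern with shortened names:

from typing import Optional
from typing import overload


@overload
def env_dir(d: None, version: str) -> None: ...
@overload
def env_dir(d: str, version: str) -> str: ...
def env_dir(d: Optional[str], version: str) -> Optional[str]:
    return None if d is None else f'{d}-{version}'


print(env_dir('py_env', 'python3.8'))  # 'py_env-python3.8' (mypy: str)
print(env_dir(None, 'python3.8'))      # None               (mypy: None)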
def assert_version_default(binary, version): def assert_version_default(binary: str, version: str) -> None:
if version != C.DEFAULT: if version != C.DEFAULT:
raise AssertionError( raise AssertionError(
'For now, pre-commit requires system-installed {}'.format(binary), f'For now, pre-commit requires system-installed {binary}',
) )
def assert_no_additional_deps(lang, additional_deps): def assert_no_additional_deps(
lang: str,
additional_deps: Sequence[str],
) -> None:
if additional_deps: if additional_deps:
raise AssertionError( raise AssertionError(
'For now, pre-commit does not support ' 'For now, pre-commit does not support '
@ -39,19 +56,23 @@ def assert_no_additional_deps(lang, additional_deps):
) )
def basic_get_default_version(): def basic_get_default_version() -> str:
return C.DEFAULT return C.DEFAULT
def basic_healthy(prefix, language_version): def basic_healthy(prefix: Prefix, language_version: str) -> bool:
return True return True
def no_install(prefix, version, additional_dependencies): def no_install(
prefix: Prefix,
version: str,
additional_dependencies: Sequence[str],
) -> NoReturn:
raise AssertionError('This type is not installable') raise AssertionError('This type is not installable')
def target_concurrency(hook): def target_concurrency(hook: 'Hook') -> int:
if hook.require_serial or 'PRE_COMMIT_NO_CONCURRENCY' in os.environ: if hook.require_serial or 'PRE_COMMIT_NO_CONCURRENCY' in os.environ:
return 1 return 1
else: else:
@ -65,20 +86,22 @@ def target_concurrency(hook):
return 1 return 1
def _shuffled(seq): def _shuffled(seq: Sequence[str]) -> List[str]:
"""Deterministically shuffle identically under both py2 + py3.""" """Deterministically shuffle"""
fixed_random = random.Random() fixed_random = random.Random()
if six.PY2: # pragma: no cover (py2) fixed_random.seed(FIXED_RANDOM_SEED, version=1)
fixed_random.seed(FIXED_RANDOM_SEED)
else: # pragma: no cover (py3)
fixed_random.seed(FIXED_RANDOM_SEED, version=1)
seq = list(seq) seq = list(seq)
random.shuffle(seq, random=fixed_random.random) random.shuffle(seq, random=fixed_random.random)
return seq return seq
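The fixed seed means every invocation shuffles the file list into the same order; a small stand-alone illustration, seeding a random.Random directly rather than passing random= as above:

import random

FIXED_SEED = 1542676186


def shuffled(seq):
    items = list(seq)
    random.Random(FIXED_SEED).shuffle(items)  # same order on every run
    return items


print(shuffled(['c.py', 'a.py', 'b.py']))
print(shuffled(['c.py', 'a.py', 'b.py']))  # identical to the line above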
def run_xargs(hook, cmd, file_args, **kwargs): def run_xargs(
hook: 'Hook',
cmd: Tuple[str, ...],
file_args: Sequence[str],
**kwargs: Any,
) -> Tuple[int, bytes]:
# Shuffle the files so that they more evenly fill out the xargs partitions, # Shuffle the files so that they more evenly fill out the xargs partitions,
# but do it deterministically in case a hook cares about ordering. # but do it deterministically in case a hook cares about ordering.
file_args = _shuffled(file_args) file_args = _shuffled(file_args)

View file

@ -1,30 +1,36 @@
from __future__ import unicode_literals
import contextlib import contextlib
import os import os
import sys import sys
from typing import Generator
from typing import Sequence
from typing import Tuple
from typing import TYPE_CHECKING
import pre_commit.constants as C import pre_commit.constants as C
from pre_commit.envcontext import envcontext from pre_commit.envcontext import envcontext
from pre_commit.envcontext import PatchesT
from pre_commit.envcontext import Var from pre_commit.envcontext import Var
from pre_commit.languages import helpers from pre_commit.languages import helpers
from pre_commit.languages.python import bin_dir from pre_commit.languages.python import bin_dir
from pre_commit.prefix import Prefix
from pre_commit.util import clean_path_on_failure from pre_commit.util import clean_path_on_failure
from pre_commit.util import cmd_output from pre_commit.util import cmd_output
from pre_commit.util import cmd_output_b from pre_commit.util import cmd_output_b
if TYPE_CHECKING:
from pre_commit.repository import Hook
ENVIRONMENT_DIR = 'node_env' ENVIRONMENT_DIR = 'node_env'
get_default_version = helpers.basic_get_default_version get_default_version = helpers.basic_get_default_version
healthy = helpers.basic_healthy healthy = helpers.basic_healthy
def _envdir(prefix, version): def _envdir(prefix: Prefix, version: str) -> str:
directory = helpers.environment_dir(ENVIRONMENT_DIR, version) directory = helpers.environment_dir(ENVIRONMENT_DIR, version)
return prefix.path(directory) return prefix.path(directory)
def get_env_patch(venv): # pragma: windows no cover def get_env_patch(venv: str) -> PatchesT: # pragma: windows no cover
if sys.platform == 'cygwin': # pragma: no cover if sys.platform == 'cygwin': # pragma: no cover
_, win_venv, _ = cmd_output('cygpath', '-w', venv) _, win_venv, _ = cmd_output('cygpath', '-w', venv)
install_prefix = r'{}\bin'.format(win_venv.strip()) install_prefix = r'{}\bin'.format(win_venv.strip())
@ -45,14 +51,17 @@ def get_env_patch(venv): # pragma: windows no cover
@contextlib.contextmanager @contextlib.contextmanager
def in_env(prefix, language_version): # pragma: windows no cover def in_env(
prefix: Prefix,
language_version: str,
) -> Generator[None, None, None]: # pragma: windows no cover
with envcontext(get_env_patch(_envdir(prefix, language_version))): with envcontext(get_env_patch(_envdir(prefix, language_version))):
yield yield
def install_environment( def install_environment(
prefix, version, additional_dependencies, prefix: Prefix, version: str, additional_dependencies: Sequence[str],
): # pragma: windows no cover ) -> None: # pragma: windows no cover
additional_dependencies = tuple(additional_dependencies) additional_dependencies = tuple(additional_dependencies)
assert prefix.exists('package.json') assert prefix.exists('package.json')
envdir = _envdir(prefix, version) envdir = _envdir(prefix, version)
@ -78,6 +87,10 @@ def install_environment(
) )
def run_hook(hook, file_args, color): # pragma: windows no cover def run_hook(
hook: 'Hook',
file_args: Sequence[str],
color: bool,
) -> Tuple[int, bytes]: # pragma: windows no cover
with in_env(hook.prefix, hook.language_version): with in_env(hook.prefix, hook.language_version):
return helpers.run_xargs(hook, hook.cmd, file_args, color=color) return helpers.run_xargs(hook, hook.cmd, file_args, color=color)

View file

@ -1,14 +1,18 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import argparse import argparse
import re import re
import sys import sys
from typing import Optional
from typing import Pattern
from typing import Sequence
from typing import Tuple
from typing import TYPE_CHECKING
from pre_commit import output from pre_commit import output
from pre_commit.languages import helpers from pre_commit.languages import helpers
from pre_commit.xargs import xargs from pre_commit.xargs import xargs
if TYPE_CHECKING:
from pre_commit.repository import Hook
ENVIRONMENT_DIR = None ENVIRONMENT_DIR = None
get_default_version = helpers.basic_get_default_version get_default_version = helpers.basic_get_default_version
@ -16,18 +20,18 @@ healthy = helpers.basic_healthy
install_environment = helpers.no_install install_environment = helpers.no_install
def _process_filename_by_line(pattern, filename): def _process_filename_by_line(pattern: Pattern[bytes], filename: str) -> int:
retv = 0 retv = 0
with open(filename, 'rb') as f: with open(filename, 'rb') as f:
for line_no, line in enumerate(f, start=1): for line_no, line in enumerate(f, start=1):
if pattern.search(line): if pattern.search(line):
retv = 1 retv = 1
output.write('{}:{}:'.format(filename, line_no)) output.write(f'{filename}:{line_no}:')
output.write_line(line.rstrip(b'\r\n')) output.write_line(line.rstrip(b'\r\n'))
return retv return retv
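A throwaway demonstration of the line-by-line scan above, run against a temporary file (the `pdb` pattern is just an example):

import re
import tempfile

with tempfile.NamedTemporaryFile('wb', suffix='.py', delete=False) as tmp:
    tmp.write(b'x = 1\nimport pdb; pdb.set_trace()\n')

pattern = re.compile(b'pdb')
with open(tmp.name, 'rb') as f:
    for line_no, line in enumerate(f, start=1):
        if pattern.search(line):
            print(f'{tmp.name}:{line_no}:', line.rstrip(b'\r\n'))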
def _process_filename_at_once(pattern, filename): def _process_filename_at_once(pattern: Pattern[bytes], filename: str) -> int:
retv = 0 retv = 0
with open(filename, 'rb') as f: with open(filename, 'rb') as f:
contents = f.read() contents = f.read()
@ -44,12 +48,16 @@ def _process_filename_at_once(pattern, filename):
return retv return retv
def run_hook(hook, file_args, color): def run_hook(
hook: 'Hook',
file_args: Sequence[str],
color: bool,
) -> Tuple[int, bytes]:
exe = (sys.executable, '-m', __name__) + tuple(hook.args) + (hook.entry,) exe = (sys.executable, '-m', __name__) + tuple(hook.args) + (hook.entry,)
return xargs(exe, file_args, color=color) return xargs(exe, file_args, color=color)
def main(argv=None): def main(argv: Optional[Sequence[str]] = None) -> int:
parser = argparse.ArgumentParser( parser = argparse.ArgumentParser(
description=( description=(
'grep-like finder using python regexes. Unlike grep, this tool ' 'grep-like finder using python regexes. Unlike grep, this tool '

View file

@ -1,31 +1,41 @@
from __future__ import unicode_literals
import contextlib import contextlib
import functools
import os import os
import sys import sys
from typing import Callable
from typing import ContextManager
from typing import Generator
from typing import Optional
from typing import Sequence
from typing import Tuple
from typing import TYPE_CHECKING
import pre_commit.constants as C import pre_commit.constants as C
from pre_commit.envcontext import envcontext from pre_commit.envcontext import envcontext
from pre_commit.envcontext import PatchesT
from pre_commit.envcontext import UNSET from pre_commit.envcontext import UNSET
from pre_commit.envcontext import Var from pre_commit.envcontext import Var
from pre_commit.languages import helpers from pre_commit.languages import helpers
from pre_commit.parse_shebang import find_executable from pre_commit.parse_shebang import find_executable
from pre_commit.prefix import Prefix
from pre_commit.util import CalledProcessError from pre_commit.util import CalledProcessError
from pre_commit.util import clean_path_on_failure from pre_commit.util import clean_path_on_failure
from pre_commit.util import cmd_output from pre_commit.util import cmd_output
from pre_commit.util import cmd_output_b from pre_commit.util import cmd_output_b
if TYPE_CHECKING:
from pre_commit.repository import Hook
ENVIRONMENT_DIR = 'py_env' ENVIRONMENT_DIR = 'py_env'
def bin_dir(venv): def bin_dir(venv: str) -> str:
"""On windows there's a different directory for the virtualenv""" """On windows there's a different directory for the virtualenv"""
bin_part = 'Scripts' if os.name == 'nt' else 'bin' bin_part = 'Scripts' if os.name == 'nt' else 'bin'
return os.path.join(venv, bin_part) return os.path.join(venv, bin_part)
def get_env_patch(venv): def get_env_patch(venv: str) -> PatchesT:
return ( return (
('PYTHONHOME', UNSET), ('PYTHONHOME', UNSET),
('VIRTUAL_ENV', venv), ('VIRTUAL_ENV', venv),
@ -33,7 +43,9 @@ def get_env_patch(venv):
) )
def _find_by_py_launcher(version): # pragma: no cover (windows only) def _find_by_py_launcher(
version: str,
) -> Optional[str]: # pragma: no cover (windows only)
if version.startswith('python'): if version.startswith('python'):
try: try:
return cmd_output( return cmd_output(
@ -42,14 +54,16 @@ def _find_by_py_launcher(version): # pragma: no cover (windows only)
)[1].strip() )[1].strip()
except CalledProcessError: except CalledProcessError:
pass pass
return None
def _find_by_sys_executable(): def _find_by_sys_executable() -> Optional[str]:
def _norm(path): def _norm(path: str) -> Optional[str]:
_, exe = os.path.split(path.lower()) _, exe = os.path.split(path.lower())
exe, _, _ = exe.partition('.exe') exe, _, _ = exe.partition('.exe')
if find_executable(exe) and exe not in {'python', 'pythonw'}: if find_executable(exe) and exe not in {'python', 'pythonw'}:
return exe return exe
return None
# On linux, I see these common sys.executables: # On linux, I see these common sys.executables:
# #
@ -66,7 +80,8 @@ def _find_by_sys_executable():
return None return None
def _get_default_version(): # pragma: no cover (platform dependent) @functools.lru_cache(maxsize=1)
def get_default_version() -> str: # pragma: no cover (platform dependent)
# First attempt from `sys.executable` (or the realpath) # First attempt from `sys.executable` (or the realpath)
exe = _find_by_sys_executable() exe = _find_by_sys_executable()
if exe: if exe:
@ -88,16 +103,7 @@ def _get_default_version(): # pragma: no cover (platform dependent)
return C.DEFAULT return C.DEFAULT
def get_default_version(): def _sys_executable_matches(version: str) -> bool:
# TODO: when dropping python2, use `functools.lru_cache(maxsize=1)`
try:
return get_default_version.cached_version
except AttributeError:
get_default_version.cached_version = _get_default_version()
return get_default_version()
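The removed wrapper cached its result on a function attribute because functools.lru_cache was not available on Python 2; per the TODO above, the same memoization now looks like this (the values below are made up):

import functools


@functools.lru_cache(maxsize=1)
def expensive_default() -> str:
    print('computed once')  # side effect just to show the caching
    return 'python3.8'      # made-up value


print(expensive_default())
print(expensive_default())  # second call is served from the cache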
def _sys_executable_matches(version):
if version == 'python': if version == 'python':
return True return True
elif not version.startswith('python'): elif not version.startswith('python'):
@ -111,7 +117,7 @@ def _sys_executable_matches(version):
return sys.version_info[:len(info)] == info return sys.version_info[:len(info)] == info
def norm_version(version): def norm_version(version: str) -> str:
# first see if our current executable is appropriate # first see if our current executable is appropriate
if _sys_executable_matches(version): if _sys_executable_matches(version):
return sys.executable return sys.executable
@ -135,14 +141,25 @@ def norm_version(version):
return os.path.expanduser(version) return os.path.expanduser(version)
def py_interface(_dir, _make_venv): def py_interface(
_dir: str,
_make_venv: Callable[[str, str], None],
) -> Tuple[
Callable[[Prefix, str], ContextManager[None]],
Callable[[Prefix, str], bool],
Callable[['Hook', Sequence[str], bool], Tuple[int, bytes]],
Callable[[Prefix, str, Sequence[str]], None],
]:
@contextlib.contextmanager @contextlib.contextmanager
def in_env(prefix, language_version): def in_env(
prefix: Prefix,
language_version: str,
) -> Generator[None, None, None]:
envdir = prefix.path(helpers.environment_dir(_dir, language_version)) envdir = prefix.path(helpers.environment_dir(_dir, language_version))
with envcontext(get_env_patch(envdir)): with envcontext(get_env_patch(envdir)):
yield yield
def healthy(prefix, language_version): def healthy(prefix: Prefix, language_version: str) -> bool:
with in_env(prefix, language_version): with in_env(prefix, language_version):
retcode, _, _ = cmd_output_b( retcode, _, _ = cmd_output_b(
'python', '-c', 'python', '-c',
@ -152,11 +169,19 @@ def py_interface(_dir, _make_venv):
) )
return retcode == 0 return retcode == 0
def run_hook(hook, file_args, color): def run_hook(
hook: 'Hook',
file_args: Sequence[str],
color: bool,
) -> Tuple[int, bytes]:
with in_env(hook.prefix, hook.language_version): with in_env(hook.prefix, hook.language_version):
return helpers.run_xargs(hook, hook.cmd, file_args, color=color) return helpers.run_xargs(hook, hook.cmd, file_args, color=color)
def install_environment(prefix, version, additional_dependencies): def install_environment(
prefix: Prefix,
version: str,
additional_dependencies: Sequence[str],
) -> None:
additional_dependencies = tuple(additional_dependencies) additional_dependencies = tuple(additional_dependencies)
directory = helpers.environment_dir(_dir, version) directory = helpers.environment_dir(_dir, version)
@ -175,7 +200,7 @@ def py_interface(_dir, _make_venv):
return in_env, healthy, run_hook, install_environment return in_env, healthy, run_hook, install_environment
def make_venv(envdir, python): def make_venv(envdir: str, python: str) -> None:
env = dict(os.environ, VIRTUALENV_NO_DOWNLOAD='1') env = dict(os.environ, VIRTUALENV_NO_DOWNLOAD='1')
cmd = (sys.executable, '-mvirtualenv', envdir, '-p', python) cmd = (sys.executable, '-mvirtualenv', envdir, '-p', python)
cmd_output_b(*cmd, env=env, cwd='/') cmd_output_b(*cmd, env=env, cwd='/')

View file

@ -1,25 +1,15 @@
from __future__ import unicode_literals
import os.path import os.path
import sys
from pre_commit.languages import python from pre_commit.languages import python
from pre_commit.util import CalledProcessError from pre_commit.util import CalledProcessError
from pre_commit.util import cmd_output from pre_commit.util import cmd_output
from pre_commit.util import cmd_output_b from pre_commit.util import cmd_output_b
ENVIRONMENT_DIR = 'py_venv' ENVIRONMENT_DIR = 'py_venv'
get_default_version = python.get_default_version
def get_default_version(): # pragma: no cover (version specific) def orig_py_exe(exe: str) -> str: # pragma: no cover (platform specific)
if sys.version_info < (3,):
return 'python3'
else:
return python.get_default_version()
def orig_py_exe(exe): # pragma: no cover (platform specific)
"""A -mvenv virtualenv made from a -mvirtualenv virtualenv installs """A -mvenv virtualenv made from a -mvirtualenv virtualenv installs
packages to the incorrect location. Attempt to find the _original_ exe packages to the incorrect location. Attempt to find the _original_ exe
and invoke `-mvenv` from there. and invoke `-mvenv` from there.
@ -48,7 +38,7 @@ def orig_py_exe(exe): # pragma: no cover (platform specific)
return exe return exe
def make_venv(envdir, python): def make_venv(envdir: str, python: str) -> None:
cmd_output_b(orig_py_exe(python), '-mvenv', envdir, cwd='/') cmd_output_b(orig_py_exe(python), '-mvenv', envdir, cwd='/')

View file

@ -1,27 +1,35 @@
from __future__ import unicode_literals
import contextlib import contextlib
import io
import os.path import os.path
import shutil import shutil
import tarfile import tarfile
from typing import Generator
from typing import Sequence
from typing import Tuple
from typing import TYPE_CHECKING
import pre_commit.constants as C import pre_commit.constants as C
from pre_commit.envcontext import envcontext from pre_commit.envcontext import envcontext
from pre_commit.envcontext import PatchesT
from pre_commit.envcontext import Var from pre_commit.envcontext import Var
from pre_commit.languages import helpers from pre_commit.languages import helpers
from pre_commit.prefix import Prefix
from pre_commit.util import CalledProcessError from pre_commit.util import CalledProcessError
from pre_commit.util import clean_path_on_failure from pre_commit.util import clean_path_on_failure
from pre_commit.util import resource_bytesio from pre_commit.util import resource_bytesio
if TYPE_CHECKING:
from pre_commit.repository import Hook
ENVIRONMENT_DIR = 'rbenv' ENVIRONMENT_DIR = 'rbenv'
get_default_version = helpers.basic_get_default_version get_default_version = helpers.basic_get_default_version
healthy = helpers.basic_healthy healthy = helpers.basic_healthy
def get_env_patch(venv, language_version): # pragma: windows no cover def get_env_patch(
patches = ( venv: str,
language_version: str,
) -> PatchesT: # pragma: windows no cover
patches: PatchesT = (
('GEM_HOME', os.path.join(venv, 'gems')), ('GEM_HOME', os.path.join(venv, 'gems')),
('RBENV_ROOT', venv), ('RBENV_ROOT', venv),
('BUNDLE_IGNORE_CONFIG', '1'), ('BUNDLE_IGNORE_CONFIG', '1'),
@ -38,8 +46,11 @@ def get_env_patch(venv, language_version): # pragma: windows no cover
return patches return patches
@contextlib.contextmanager @contextlib.contextmanager # pragma: windows no cover
def in_env(prefix, language_version): # pragma: windows no cover def in_env(
prefix: Prefix,
language_version: str,
) -> Generator[None, None, None]:
envdir = prefix.path( envdir = prefix.path(
helpers.environment_dir(ENVIRONMENT_DIR, language_version), helpers.environment_dir(ENVIRONMENT_DIR, language_version),
) )
@ -47,13 +58,16 @@ def in_env(prefix, language_version): # pragma: windows no cover
yield yield
def _extract_resource(filename, dest): def _extract_resource(filename: str, dest: str) -> None:
with resource_bytesio(filename) as bio: with resource_bytesio(filename) as bio:
with tarfile.open(fileobj=bio) as tf: with tarfile.open(fileobj=bio) as tf:
tf.extractall(dest) tf.extractall(dest)
def _install_rbenv(prefix, version=C.DEFAULT): # pragma: windows no cover def _install_rbenv(
prefix: Prefix,
version: str = C.DEFAULT,
) -> None: # pragma: windows no cover
directory = helpers.environment_dir(ENVIRONMENT_DIR, version) directory = helpers.environment_dir(ENVIRONMENT_DIR, version)
_extract_resource('rbenv.tar.gz', prefix.path('.')) _extract_resource('rbenv.tar.gz', prefix.path('.'))
@ -66,7 +80,7 @@ def _install_rbenv(prefix, version=C.DEFAULT): # pragma: windows no cover
_extract_resource('ruby-build.tar.gz', plugins_dir) _extract_resource('ruby-build.tar.gz', plugins_dir)
activate_path = prefix.path(directory, 'bin', 'activate') activate_path = prefix.path(directory, 'bin', 'activate')
with io.open(activate_path, 'w') as activate_file: with open(activate_path, 'w') as activate_file:
# This is similar to how you would install rbenv to your home directory # This is similar to how you would install rbenv to your home directory
# However we do a couple things to make the executables exposed and # However we do a couple things to make the executables exposed and
# configure it to work in our directory. # configure it to work in our directory.
@ -86,10 +100,13 @@ def _install_rbenv(prefix, version=C.DEFAULT): # pragma: windows no cover
# If we aren't using the system ruby, add a version here # If we aren't using the system ruby, add a version here
if version != C.DEFAULT: if version != C.DEFAULT:
activate_file.write('export RBENV_VERSION="{}"\n'.format(version)) activate_file.write(f'export RBENV_VERSION="{version}"\n')
def _install_ruby(prefix, version): # pragma: windows no cover def _install_ruby(
prefix: Prefix,
version: str,
) -> None: # pragma: windows no cover
try: try:
helpers.run_setup_cmd(prefix, ('rbenv', 'download', version)) helpers.run_setup_cmd(prefix, ('rbenv', 'download', version))
except CalledProcessError: # pragma: no cover (usually found with download) except CalledProcessError: # pragma: no cover (usually found with download)
@ -98,8 +115,8 @@ def _install_ruby(prefix, version): # pragma: windows no cover
def install_environment( def install_environment(
prefix, version, additional_dependencies, prefix: Prefix, version: str, additional_dependencies: Sequence[str],
): # pragma: windows no cover ) -> None: # pragma: windows no cover
additional_dependencies = tuple(additional_dependencies) additional_dependencies = tuple(additional_dependencies)
directory = helpers.environment_dir(ENVIRONMENT_DIR, version) directory = helpers.environment_dir(ENVIRONMENT_DIR, version)
with clean_path_on_failure(prefix.path(directory)): with clean_path_on_failure(prefix.path(directory)):
@ -124,6 +141,10 @@ def install_environment(
) )
def run_hook(hook, file_args, color): # pragma: windows no cover def run_hook(
hook: 'Hook',
file_args: Sequence[str],
color: bool,
) -> Tuple[int, bytes]: # pragma: windows no cover
with in_env(hook.prefix, hook.language_version): with in_env(hook.prefix, hook.language_version):
return helpers.run_xargs(hook, hook.cmd, file_args, color=color) return helpers.run_xargs(hook, hook.cmd, file_args, color=color)

View file

@ -1,24 +1,31 @@
from __future__ import unicode_literals
import contextlib import contextlib
import os.path import os.path
from typing import Generator
from typing import Sequence
from typing import Set
from typing import Tuple
from typing import TYPE_CHECKING
import toml import toml
import pre_commit.constants as C import pre_commit.constants as C
from pre_commit.envcontext import envcontext from pre_commit.envcontext import envcontext
from pre_commit.envcontext import PatchesT
from pre_commit.envcontext import Var from pre_commit.envcontext import Var
from pre_commit.languages import helpers from pre_commit.languages import helpers
from pre_commit.prefix import Prefix
from pre_commit.util import clean_path_on_failure from pre_commit.util import clean_path_on_failure
from pre_commit.util import cmd_output_b from pre_commit.util import cmd_output_b
if TYPE_CHECKING:
from pre_commit.repository import Hook
ENVIRONMENT_DIR = 'rustenv' ENVIRONMENT_DIR = 'rustenv'
get_default_version = helpers.basic_get_default_version get_default_version = helpers.basic_get_default_version
healthy = helpers.basic_healthy healthy = helpers.basic_healthy
def get_env_patch(target_dir): def get_env_patch(target_dir: str) -> PatchesT:
return ( return (
( (
'PATH', 'PATH',
@ -28,7 +35,7 @@ def get_env_patch(target_dir):
@contextlib.contextmanager @contextlib.contextmanager
def in_env(prefix): def in_env(prefix: Prefix) -> Generator[None, None, None]:
target_dir = prefix.path( target_dir = prefix.path(
helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT), helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT),
) )
@ -36,7 +43,10 @@ def in_env(prefix):
yield yield
def _add_dependencies(cargo_toml_path, additional_dependencies): def _add_dependencies(
cargo_toml_path: str,
additional_dependencies: Set[str],
) -> None:
with open(cargo_toml_path, 'r+') as f: with open(cargo_toml_path, 'r+') as f:
cargo_toml = toml.load(f) cargo_toml = toml.load(f)
cargo_toml.setdefault('dependencies', {}) cargo_toml.setdefault('dependencies', {})
@ -48,7 +58,11 @@ def _add_dependencies(cargo_toml_path, additional_dependencies):
f.truncate() f.truncate()
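A stand-alone sketch of the Cargo.toml round-trip, using a temporary file and a simplified dependency spec (every extra dependency pinned to '*'; the real code's cli:/version handling is omitted):

import tempfile

import toml

with tempfile.NamedTemporaryFile('w', suffix='.toml', delete=False) as tmp:
    tmp.write('[package]\nname = "demo"\nversion = "0.1.0"\n')

with open(tmp.name, 'r+') as f:
    cargo_toml = toml.load(f)
    deps = cargo_toml.setdefault('dependencies', {})
    for dep in ('shellharden', 'regex'):  # arbitrary example crates
        deps[dep] = '*'
    f.seek(0)
    toml.dump(cargo_toml, f)
    f.truncate()

print(open(tmp.name).read())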
def install_environment(prefix, version, additional_dependencies): def install_environment(
prefix: Prefix,
version: str,
additional_dependencies: Sequence[str],
) -> None:
helpers.assert_version_default('rust', version) helpers.assert_version_default('rust', version)
directory = prefix.path( directory = prefix.path(
helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT), helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT),
@ -73,7 +87,7 @@ def install_environment(prefix, version, additional_dependencies):
_add_dependencies(prefix.path('Cargo.toml'), lib_deps) _add_dependencies(prefix.path('Cargo.toml'), lib_deps)
with clean_path_on_failure(directory): with clean_path_on_failure(directory):
packages_to_install = {('--path', '.')} packages_to_install: Set[Tuple[str, ...]] = {('--path', '.')}
for cli_dep in cli_deps: for cli_dep in cli_deps:
cli_dep = cli_dep[len('cli:'):] cli_dep = cli_dep[len('cli:'):]
package, _, version = cli_dep.partition(':') package, _, version = cli_dep.partition(':')
@ -82,13 +96,17 @@ def install_environment(prefix, version, additional_dependencies):
else: else:
packages_to_install.add((package,)) packages_to_install.add((package,))
for package in packages_to_install: for args in packages_to_install:
cmd_output_b( cmd_output_b(
'cargo', 'install', '--bins', '--root', directory, *package, 'cargo', 'install', '--bins', '--root', directory, *args,
cwd=prefix.prefix_dir cwd=prefix.prefix_dir,
) )
def run_hook(hook, file_args, color): def run_hook(
hook: 'Hook',
file_args: Sequence[str],
color: bool,
) -> Tuple[int, bytes]:
with in_env(hook.prefix): with in_env(hook.prefix):
return helpers.run_xargs(hook, hook.cmd, file_args, color=color) return helpers.run_xargs(hook, hook.cmd, file_args, color=color)

View file

@ -1,7 +1,11 @@
from __future__ import unicode_literals from typing import Sequence
from typing import Tuple
from typing import TYPE_CHECKING
from pre_commit.languages import helpers from pre_commit.languages import helpers
if TYPE_CHECKING:
from pre_commit.repository import Hook
ENVIRONMENT_DIR = None ENVIRONMENT_DIR = None
get_default_version = helpers.basic_get_default_version get_default_version = helpers.basic_get_default_version
@ -9,7 +13,11 @@ healthy = helpers.basic_healthy
install_environment = helpers.no_install install_environment = helpers.no_install
def run_hook(hook, file_args, color): def run_hook(
hook: 'Hook',
file_args: Sequence[str],
color: bool,
) -> Tuple[int, bytes]:
cmd = hook.cmd cmd = hook.cmd
cmd = (hook.prefix.path(cmd[0]),) + cmd[1:] cmd = (hook.prefix.path(cmd[0]),) + cmd[1:]
return helpers.run_xargs(hook, cmd, file_args, color=color) return helpers.run_xargs(hook, cmd, file_args, color=color)

View file

@ -1,15 +1,22 @@
from __future__ import unicode_literals
import contextlib import contextlib
import os import os
from typing import Generator
from typing import Sequence
from typing import Tuple
from typing import TYPE_CHECKING
import pre_commit.constants as C import pre_commit.constants as C
from pre_commit.envcontext import envcontext from pre_commit.envcontext import envcontext
from pre_commit.envcontext import PatchesT
from pre_commit.envcontext import Var from pre_commit.envcontext import Var
from pre_commit.languages import helpers from pre_commit.languages import helpers
from pre_commit.prefix import Prefix
from pre_commit.util import clean_path_on_failure from pre_commit.util import clean_path_on_failure
from pre_commit.util import cmd_output_b from pre_commit.util import cmd_output_b
if TYPE_CHECKING:
from pre_commit.repository import Hook
ENVIRONMENT_DIR = 'swift_env' ENVIRONMENT_DIR = 'swift_env'
get_default_version = helpers.basic_get_default_version get_default_version = helpers.basic_get_default_version
healthy = helpers.basic_healthy healthy = helpers.basic_healthy
@ -17,13 +24,13 @@ BUILD_DIR = '.build'
BUILD_CONFIG = 'release' BUILD_CONFIG = 'release'
def get_env_patch(venv): # pragma: windows no cover def get_env_patch(venv: str) -> PatchesT: # pragma: windows no cover
bin_path = os.path.join(venv, BUILD_DIR, BUILD_CONFIG) bin_path = os.path.join(venv, BUILD_DIR, BUILD_CONFIG)
return (('PATH', (bin_path, os.pathsep, Var('PATH'))),) return (('PATH', (bin_path, os.pathsep, Var('PATH'))),)
@contextlib.contextmanager @contextlib.contextmanager # pragma: windows no cover
def in_env(prefix): # pragma: windows no cover def in_env(prefix: Prefix) -> Generator[None, None, None]:
envdir = prefix.path( envdir = prefix.path(
helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT), helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT),
) )
@ -32,8 +39,8 @@ def in_env(prefix): # pragma: windows no cover
def install_environment( def install_environment(
prefix, version, additional_dependencies, prefix: Prefix, version: str, additional_dependencies: Sequence[str],
): # pragma: windows no cover ) -> None: # pragma: windows no cover
helpers.assert_version_default('swift', version) helpers.assert_version_default('swift', version)
helpers.assert_no_additional_deps('swift', additional_dependencies) helpers.assert_no_additional_deps('swift', additional_dependencies)
directory = prefix.path( directory = prefix.path(
@ -51,6 +58,10 @@ def install_environment(
) )
def run_hook(hook, file_args, color): # pragma: windows no cover def run_hook(
hook: 'Hook',
file_args: Sequence[str],
color: bool,
) -> Tuple[int, bytes]: # pragma: windows no cover
with in_env(hook.prefix): with in_env(hook.prefix):
return helpers.run_xargs(hook, hook.cmd, file_args, color=color) return helpers.run_xargs(hook, hook.cmd, file_args, color=color)

View file

@ -1,7 +1,12 @@
from __future__ import unicode_literals from typing import Sequence
from typing import Tuple
from typing import TYPE_CHECKING
from pre_commit.languages import helpers from pre_commit.languages import helpers
if TYPE_CHECKING:
from pre_commit.repository import Hook
ENVIRONMENT_DIR = None ENVIRONMENT_DIR = None
get_default_version = helpers.basic_get_default_version get_default_version = helpers.basic_get_default_version
@ -9,5 +14,9 @@ healthy = helpers.basic_healthy
install_environment = helpers.no_install install_environment = helpers.no_install
def run_hook(hook, file_args, color): def run_hook(
hook: 'Hook',
file_args: Sequence[str],
color: bool,
) -> Tuple[int, bytes]:
return helpers.run_xargs(hook, hook.cmd, file_args, color=color) return helpers.run_xargs(hook, hook.cmd, file_args, color=color)

View file

@ -1,12 +1,10 @@
from __future__ import unicode_literals
import contextlib import contextlib
import logging import logging
from typing import Generator
from pre_commit import color from pre_commit import color
from pre_commit import output from pre_commit import output
logger = logging.getLogger('pre_commit') logger = logging.getLogger('pre_commit')
LOG_LEVEL_COLORS = { LOG_LEVEL_COLORS = {
@ -18,15 +16,15 @@ LOG_LEVEL_COLORS = {
class LoggingHandler(logging.Handler): class LoggingHandler(logging.Handler):
def __init__(self, use_color): def __init__(self, use_color: bool) -> None:
super(LoggingHandler, self).__init__() super().__init__()
self.use_color = use_color self.use_color = use_color
def emit(self, record): def emit(self, record: logging.LogRecord) -> None:
output.write_line( output.write_line(
'{} {}'.format( '{} {}'.format(
color.format_color( color.format_color(
'[{}]'.format(record.levelname), f'[{record.levelname}]',
LOG_LEVEL_COLORS[record.levelname], LOG_LEVEL_COLORS[record.levelname],
self.use_color, self.use_color,
), ),
@ -36,8 +34,8 @@ class LoggingHandler(logging.Handler):
@contextlib.contextmanager @contextlib.contextmanager
def logging_handler(*args, **kwargs): def logging_handler(use_color: bool) -> Generator[None, None, None]:
handler = LoggingHandler(*args, **kwargs) handler = LoggingHandler(use_color)
logger.addHandler(handler) logger.addHandler(handler)
logger.setLevel(logging.INFO) logger.setLevel(logging.INFO)
try: try:

View file

@ -1,9 +1,11 @@
from __future__ import unicode_literals
import argparse import argparse
import logging import logging
import os import os
import sys import sys
from typing import Any
from typing import Optional
from typing import Sequence
from typing import Union
import pre_commit.constants as C import pre_commit.constants as C
from pre_commit import color from pre_commit import color
@ -39,7 +41,7 @@ os.environ.pop('__PYVENV_LAUNCHER__', None)
COMMANDS_NO_GIT = {'clean', 'gc', 'init-templatedir', 'sample-config'} COMMANDS_NO_GIT = {'clean', 'gc', 'init-templatedir', 'sample-config'}
def _add_color_option(parser): def _add_color_option(parser: argparse.ArgumentParser) -> None:
parser.add_argument( parser.add_argument(
'--color', default=os.environ.get('PRE_COMMIT_COLOR', 'auto'), '--color', default=os.environ.get('PRE_COMMIT_COLOR', 'auto'),
type=color.use_color, type=color.use_color,
@ -48,7 +50,7 @@ def _add_color_option(parser):
) )
def _add_config_option(parser): def _add_config_option(parser: argparse.ArgumentParser) -> None:
parser.add_argument( parser.add_argument(
'-c', '--config', default=C.CONFIG_FILE, '-c', '--config', default=C.CONFIG_FILE,
help='Path to alternate config file', help='Path to alternate config file',
@ -56,18 +58,24 @@ def _add_config_option(parser):
class AppendReplaceDefault(argparse.Action): class AppendReplaceDefault(argparse.Action):
def __init__(self, *args, **kwargs): def __init__(self, *args: Any, **kwargs: Any) -> None:
super(AppendReplaceDefault, self).__init__(*args, **kwargs) super().__init__(*args, **kwargs)
self.appended = False self.appended = False
def __call__(self, parser, namespace, values, option_string=None): def __call__(
self,
parser: argparse.ArgumentParser,
namespace: argparse.Namespace,
values: Union[str, Sequence[str], None],
option_string: Optional[str] = None,
) -> None:
if not self.appended: if not self.appended:
setattr(namespace, self.dest, []) setattr(namespace, self.dest, [])
self.appended = True self.appended = True
getattr(namespace, self.dest).append(values) getattr(namespace, self.dest).append(values)
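To see the behavior, here is the action copied into a throwaway script with a demo parser; the -t/--hook-type wiring loosely mirrors _add_hook_type_option below, and the default and choices here are illustrative only:

import argparse
from typing import Any
from typing import Optional
from typing import Sequence
from typing import Union


class AppendReplaceDefault(argparse.Action):
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        self.appended = False

    def __call__(
            self,
            parser: argparse.ArgumentParser,
            namespace: argparse.Namespace,
            values: Union[str, Sequence[str], None],
            option_string: Optional[str] = None,
    ) -> None:
        if not self.appended:
            setattr(namespace, self.dest, [])
            self.appended = True
        getattr(namespace, self.dest).append(values)


parser = argparse.ArgumentParser()
parser.add_argument(
    '-t', '--hook-type', dest='hook_types',
    action=AppendReplaceDefault, default=['pre-commit'],
)
print(parser.parse_args([]).hook_types)                  # ['pre-commit']
print(parser.parse_args(['-t', 'pre-push']).hook_types)  # ['pre-push'], default dropped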
def _add_hook_type_option(parser): def _add_hook_type_option(parser: argparse.ArgumentParser) -> None:
parser.add_argument( parser.add_argument(
'-t', '--hook-type', choices=( '-t', '--hook-type', choices=(
'pre-commit', 'pre-merge-commit', 'pre-push', 'pre-commit', 'pre-merge-commit', 'pre-push',
@ -79,7 +87,7 @@ def _add_hook_type_option(parser):
) )
def _add_run_options(parser): def _add_run_options(parser: argparse.ArgumentParser) -> None:
parser.add_argument('hook', nargs='?', help='A single hook-id to run') parser.add_argument('hook', nargs='?', help='A single hook-id to run')
parser.add_argument('--verbose', '-v', action='store_true', default=False) parser.add_argument('--verbose', '-v', action='store_true', default=False)
parser.add_argument( parser.add_argument(
@ -113,7 +121,7 @@ def _add_run_options(parser):
) )
def _adjust_args_and_chdir(args): def _adjust_args_and_chdir(args: argparse.Namespace) -> None:
# `--config` was specified relative to the non-root working directory # `--config` was specified relative to the non-root working directory
if os.path.exists(args.config): if os.path.exists(args.config):
args.config = os.path.abspath(args.config) args.config = os.path.abspath(args.config)
@ -145,7 +153,7 @@ def _adjust_args_and_chdir(args):
args.repo = os.path.relpath(args.repo) args.repo = os.path.relpath(args.repo)
def main(argv=None): def main(argv: Optional[Sequence[str]] = None) -> int:
argv = argv if argv is not None else sys.argv[1:] argv = argv if argv is not None else sys.argv[1:]
argv = [five.to_text(arg) for arg in argv] argv = [five.to_text(arg) for arg in argv]
parser = argparse.ArgumentParser(prog='pre-commit') parser = argparse.ArgumentParser(prog='pre-commit')
@ -154,7 +162,7 @@ def main(argv=None):
parser.add_argument( parser.add_argument(
'-V', '--version', '-V', '--version',
action='version', action='version',
version='%(prog)s {}'.format(C.VERSION), version=f'%(prog)s {C.VERSION}',
) )
subparsers = parser.add_subparsers(dest='command') subparsers = parser.add_subparsers(dest='command')
@ -254,7 +262,7 @@ def main(argv=None):
_add_run_options(run_parser) _add_run_options(run_parser)
sample_config_parser = subparsers.add_parser( sample_config_parser = subparsers.add_parser(
'sample-config', help='Produce a sample {} file'.format(C.CONFIG_FILE), 'sample-config', help=f'Produce a sample {C.CONFIG_FILE} file',
) )
_add_color_option(sample_config_parser) _add_color_option(sample_config_parser)
_add_config_option(sample_config_parser) _add_config_option(sample_config_parser)
@ -345,11 +353,11 @@ def main(argv=None):
return uninstall(hook_types=args.hook_types) return uninstall(hook_types=args.hook_types)
else: else:
raise NotImplementedError( raise NotImplementedError(
'Command {} not implemented.'.format(args.command), f'Command {args.command} not implemented.',
) )
raise AssertionError( raise AssertionError(
'Command {} failed to exit with a returncode'.format(args.command), f'Command {args.command} failed to exit with a returncode',
) )
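
A side note on the typed AppendReplaceDefault action above: its behaviour is unchanged — the declared default survives until the option is first used, then it is replaced rather than appended to. A standalone sketch (the parser and the default value below are invented for illustration):

    import argparse

    class AppendReplaceDefault(argparse.Action):
        """Append values, but drop the declared default on first use."""
        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self.appended = False

        def __call__(self, parser, namespace, values, option_string=None):
            if not self.appended:
                setattr(namespace, self.dest, [])
                self.appended = True
            getattr(namespace, self.dest).append(values)

    parser = argparse.ArgumentParser()
    parser.add_argument('-t', action=AppendReplaceDefault, default=['pre-commit'])
    print(parser.parse_args([]).t)                  # ['pre-commit'] -- default kept
    print(parser.parse_args(['-t', 'pre-push']).t)  # ['pre-push']   -- default replaced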

View file

@ -1,10 +1,8 @@
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import argparse import argparse
import os.path import os.path
import tarfile import tarfile
from typing import Optional
from typing import Sequence
from pre_commit import output from pre_commit import output
from pre_commit.util import cmd_output_b from pre_commit.util import cmd_output_b
@ -27,7 +25,7 @@ REPOS = (
) )
def make_archive(name, repo, ref, destdir): def make_archive(name: str, repo: str, ref: str, destdir: str) -> str:
"""Makes an archive of a repository in the given destdir. """Makes an archive of a repository in the given destdir.
:param text name: Name to give the archive. For instance foo. The file :param text name: Name to give the archive. For instance foo. The file
@ -53,15 +51,16 @@ def make_archive(name, repo, ref, destdir):
return output_path return output_path
def main(argv=None): def main(argv: Optional[Sequence[str]] = None) -> int:
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument('--dest', default='pre_commit/resources') parser.add_argument('--dest', default='pre_commit/resources')
args = parser.parse_args(argv) args = parser.parse_args(argv)
for archive_name, repo, ref in REPOS: for archive_name, repo, ref in REPOS:
output.write_line( output.write_line(
'Making {}.tar.gz for {}@{}'.format(archive_name, repo, ref), f'Making {archive_name}.tar.gz for {repo}@{ref}',
) )
make_archive(archive_name, repo, ref, args.dest) make_archive(archive_name, repo, ref, args.dest)
return 0
if __name__ == '__main__': if __name__ == '__main__':
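
For orientation, producing one of these archives boils down to: clone the repository at the ref, drop the git metadata, and tar the result. An illustrative sketch, not the project's exact commands:

    import os.path
    import shutil
    import subprocess
    import tarfile
    import tempfile

    def make_archive_sketch(name: str, repo: str, ref: str, destdir: str) -> str:
        with tempfile.TemporaryDirectory() as tmp:
            src = os.path.join(tmp, name)
            subprocess.check_call(('git', 'clone', repo, src))
            subprocess.check_call(('git', '-C', src, 'checkout', ref))
            shutil.rmtree(os.path.join(src, '.git'))  # the archive should not carry git metadata
            out = os.path.join(destdir, f'{name}.tar.gz')
            with tarfile.open(out, 'w:gz') as tf:
                tf.add(src, arcname=name)
        return out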

View file

@ -1,4 +1,6 @@
import argparse import argparse
from typing import Optional
from typing import Sequence
import pre_commit.constants as C import pre_commit.constants as C
from pre_commit import git from pre_commit import git
@ -8,7 +10,7 @@ from pre_commit.repository import all_hooks
from pre_commit.store import Store from pre_commit.store import Store
def check_all_hooks_match_files(config_file): def check_all_hooks_match_files(config_file: str) -> int:
classifier = Classifier(git.get_all_files()) classifier = Classifier(git.get_all_files())
retv = 0 retv = 0
@ -16,13 +18,13 @@ def check_all_hooks_match_files(config_file):
if hook.always_run or hook.language == 'fail': if hook.always_run or hook.language == 'fail':
continue continue
elif not classifier.filenames_for_hook(hook): elif not classifier.filenames_for_hook(hook):
print('{} does not apply to this repository'.format(hook.id)) print(f'{hook.id} does not apply to this repository')
retv = 1 retv = 1
return retv return retv
def main(argv=None): def main(argv: Optional[Sequence[str]] = None) -> int:
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*', default=[C.CONFIG_FILE]) parser.add_argument('filenames', nargs='*', default=[C.CONFIG_FILE])
args = parser.parse_args(argv) args = parser.parse_args(argv)

View file

@ -1,7 +1,7 @@
from __future__ import print_function
import argparse import argparse
import re import re
from typing import Optional
from typing import Sequence
from cfgv import apply_defaults from cfgv import apply_defaults
@ -12,7 +12,11 @@ from pre_commit.clientlib import MANIFEST_HOOK_DICT
from pre_commit.commands.run import Classifier from pre_commit.commands.run import Classifier
def exclude_matches_any(filenames, include, exclude): def exclude_matches_any(
filenames: Sequence[str],
include: str,
exclude: str,
) -> bool:
if exclude == '^$': if exclude == '^$':
return True return True
include_re, exclude_re = re.compile(include), re.compile(exclude) include_re, exclude_re = re.compile(include), re.compile(exclude)
@ -22,7 +26,7 @@ def exclude_matches_any(filenames, include, exclude):
return False return False
def check_useless_excludes(config_file): def check_useless_excludes(config_file: str) -> int:
config = load_config(config_file) config = load_config(config_file)
classifier = Classifier(git.get_all_files()) classifier = Classifier(git.get_all_files())
retv = 0 retv = 0
@ -54,7 +58,7 @@ def check_useless_excludes(config_file):
return retv return retv
def main(argv=None): def main(argv: Optional[Sequence[str]] = None) -> int:
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*', default=[C.CONFIG_FILE]) parser.add_argument('filenames', nargs='*', default=[C.CONFIG_FILE])
args = parser.parse_args(argv) args = parser.parse_args(argv)

View file

@ -1,12 +1,15 @@
import sys import sys
from typing import Optional
from typing import Sequence
from pre_commit import output from pre_commit import output
def main(argv=None): def main(argv: Optional[Sequence[str]] = None) -> int:
argv = argv if argv is not None else sys.argv[1:] argv = argv if argv is not None else sys.argv[1:]
for arg in argv: for arg in argv:
output.write_line(arg) output.write_line(arg)
return 0
if __name__ == '__main__': if __name__ == '__main__':

View file

@ -1,21 +1,22 @@
from __future__ import unicode_literals import contextlib
import sys import sys
from typing import IO
from typing import Optional
from typing import Union
from pre_commit import color from pre_commit import color
from pre_commit import five from pre_commit import five
from pre_commit.util import noop_context
def get_hook_message( def get_hook_message(
start, start: str,
postfix='', postfix: str = '',
end_msg=None, end_msg: Optional[str] = None,
end_len=0, end_len: int = 0,
end_color=None, end_color: Optional[str] = None,
use_color=None, use_color: Optional[bool] = None,
cols=80, cols: int = 80,
): ) -> str:
"""Prints a message for running a hook. """Prints a message for running a hook.
This currently supports three approaches: This currently supports three approaches:
@ -46,16 +47,13 @@ def get_hook_message(
) )
start...........................................................postfix end start...........................................................postfix end
""" """
if bool(end_msg) == bool(end_len):
raise ValueError('Expected one of (`end_msg`, `end_len`)')
if end_msg is not None and (end_color is None or use_color is None):
raise ValueError(
'`end_color` and `use_color` are required with `end_msg`',
)
if end_len: if end_len:
assert end_msg is None, end_msg
return start + '.' * (cols - len(start) - end_len - 1) return start + '.' * (cols - len(start) - end_len - 1)
else: else:
assert end_msg is not None
assert end_color is not None
assert use_color is not None
return '{}{}{}{}\n'.format( return '{}{}{}{}\n'.format(
start, start,
'.' * (cols - len(start) - len(postfix) - len(end_msg) - 1), '.' * (cols - len(start) - len(postfix) - len(end_msg) - 1),
@ -64,23 +62,22 @@ def get_hook_message(
) )
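
The two modes described in the docstring, calling the function above with an arbitrary width of 40 columns:

    # end_len mode: pad with dots, leaving room for a status printed later
    get_hook_message('flake8', end_len=len('Passed'), cols=40)
    # -> 'flake8' followed by 27 dots (40 - 6 - 6 - 1)

    # end_msg mode: the whole line, status included (colour disabled here)
    get_hook_message('flake8', end_msg='Passed', end_color='', use_color=False, cols=40)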
stdout_byte_stream = getattr(sys.stdout, 'buffer', sys.stdout) def write(s: str, stream: IO[bytes] = sys.stdout.buffer) -> None:
def write(s, stream=stdout_byte_stream):
stream.write(five.to_bytes(s)) stream.write(five.to_bytes(s))
stream.flush() stream.flush()
def write_line(s=None, stream=stdout_byte_stream, logfile_name=None): def write_line(
output_streams = [stream] s: Union[None, str, bytes] = None,
if logfile_name: stream: IO[bytes] = sys.stdout.buffer,
ctx = open(logfile_name, 'ab') logfile_name: Optional[str] = None,
output_streams.append(ctx) ) -> None:
else: with contextlib.ExitStack() as exit_stack:
ctx = noop_context() output_streams = [stream]
if logfile_name:
stream = exit_stack.enter_context(open(logfile_name, 'ab'))
output_streams.append(stream)
with ctx:
for output_stream in output_streams: for output_stream in output_streams:
if s is not None: if s is not None:
output_stream.write(five.to_bytes(s)) output_stream.write(five.to_bytes(s))
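
The ExitStack rewrite above replaces the old noop_context dance; the pattern in isolation (the log file name is invented):

    import contextlib
    import sys

    def tee(data: bytes, logfile_name=None) -> None:
        streams = [sys.stdout.buffer]
        with contextlib.ExitStack() as exit_stack:
            if logfile_name:
                # entered here, closed automatically when the with-block exits
                streams.append(exit_stack.enter_context(open(logfile_name, 'ab')))
            for stream in streams:
                stream.write(data)
                stream.flush()

    tee(b'hello\n', logfile_name='hook.log')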

View file

@ -1,24 +1,28 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import os.path import os.path
from typing import Mapping
from typing import NoReturn
from typing import Optional
from typing import Tuple
from identify.identify import parse_shebang_from_file from identify.identify import parse_shebang_from_file
class ExecutableNotFoundError(OSError): class ExecutableNotFoundError(OSError):
def to_output(self): def to_output(self) -> Tuple[int, bytes, None]:
return (1, self.args[0].encode('UTF-8'), b'') return (1, self.args[0].encode('UTF-8'), None)
def parse_filename(filename): def parse_filename(filename: str) -> Tuple[str, ...]:
if not os.path.exists(filename): if not os.path.exists(filename):
return () return ()
else: else:
return parse_shebang_from_file(filename) return parse_shebang_from_file(filename)
def find_executable(exe, _environ=None): def find_executable(
exe: str,
_environ: Optional[Mapping[str, str]] = None,
) -> Optional[str]:
exe = os.path.normpath(exe) exe = os.path.normpath(exe)
if os.sep in exe: if os.sep in exe:
return exe return exe
@ -42,9 +46,9 @@ def find_executable(exe, _environ=None):
return None return None
def normexe(orig): def normexe(orig: str) -> str:
def _error(msg): def _error(msg: str) -> NoReturn:
raise ExecutableNotFoundError('Executable `{}` {}'.format(orig, msg)) raise ExecutableNotFoundError(f'Executable `{orig}` {msg}')
if os.sep not in orig and (not os.altsep or os.altsep not in orig): if os.sep not in orig and (not os.altsep or os.altsep not in orig):
exe = find_executable(orig) exe = find_executable(orig)
@ -61,7 +65,7 @@ def normexe(orig):
return orig return orig
def normalize_cmd(cmd): def normalize_cmd(cmd: Tuple[str, ...]) -> Tuple[str, ...]:
"""Fixes for the following issues on windows """Fixes for the following issues on windows
- https://bugs.python.org/issue8557 - https://bugs.python.org/issue8557
- windows does not parse shebangs - windows does not parse shebangs
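
For context, the heart of find_executable is a PATH walk; a POSIX-only sketch of that idea (the real function also covers Windows details not visible in this hunk):

    import os
    from typing import Optional

    def which_sketch(exe: str, path: str) -> Optional[str]:
        for part in path.split(os.pathsep):
            candidate = os.path.join(part, exe)
            if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
                return candidate
        return None

    which_sketch('git', os.environ.get('PATH', ''))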

View file

@ -1,18 +1,17 @@
from __future__ import unicode_literals
import collections
import os.path import os.path
from typing import NamedTuple
from typing import Tuple
class Prefix(collections.namedtuple('Prefix', ('prefix_dir',))): class Prefix(NamedTuple):
__slots__ = () prefix_dir: str
def path(self, *parts): def path(self, *parts: str) -> str:
return os.path.normpath(os.path.join(self.prefix_dir, *parts)) return os.path.normpath(os.path.join(self.prefix_dir, *parts))
def exists(self, *parts): def exists(self, *parts: str) -> bool:
return os.path.exists(self.path(*parts)) return os.path.exists(self.path(*parts))
def star(self, end): def star(self, end: str) -> Tuple[str, ...]:
paths = os.listdir(self.prefix_dir) paths = os.listdir(self.prefix_dir)
return tuple(path for path in paths if path.endswith(end)) return tuple(path for path in paths if path.endswith(end))
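
Call sites are unaffected by the NamedTuple conversion; on a POSIX path, for example:

    prefix = Prefix('/tmp/some-clone')
    prefix.path('hooks', 'run.sh')   # '/tmp/some-clone/hooks/run.sh'
    prefix.exists('setup.py')        # True only if the file exists in the clone
    prefix.star('.gemspec')          # every *.gemspec directly inside the prefix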

View file

@ -1,11 +1,15 @@
from __future__ import unicode_literals
import collections
import io
import json import json
import logging import logging
import os import os
import shlex import shlex
from typing import Any
from typing import Dict
from typing import List
from typing import NamedTuple
from typing import Optional
from typing import Sequence
from typing import Set
from typing import Tuple
import pre_commit.constants as C import pre_commit.constants as C
from pre_commit import five from pre_commit import five
@ -16,6 +20,7 @@ from pre_commit.clientlib import META
from pre_commit.languages.all import languages from pre_commit.languages.all import languages
from pre_commit.languages.helpers import environment_dir from pre_commit.languages.helpers import environment_dir
from pre_commit.prefix import Prefix from pre_commit.prefix import Prefix
from pre_commit.store import Store
from pre_commit.util import parse_version from pre_commit.util import parse_version
from pre_commit.util import rmtree from pre_commit.util import rmtree
@ -23,27 +28,27 @@ from pre_commit.util import rmtree
logger = logging.getLogger('pre_commit') logger = logging.getLogger('pre_commit')
def _state(additional_deps): def _state(additional_deps: Sequence[str]) -> object:
return {'additional_dependencies': sorted(additional_deps)} return {'additional_dependencies': sorted(additional_deps)}
def _state_filename(prefix, venv): def _state_filename(prefix: Prefix, venv: str) -> str:
return prefix.path(venv, '.install_state_v' + C.INSTALLED_STATE_VERSION) return prefix.path(venv, '.install_state_v' + C.INSTALLED_STATE_VERSION)
def _read_state(prefix, venv): def _read_state(prefix: Prefix, venv: str) -> Optional[object]:
filename = _state_filename(prefix, venv) filename = _state_filename(prefix, venv)
if not os.path.exists(filename): if not os.path.exists(filename):
return None return None
else: else:
with io.open(filename) as f: with open(filename) as f:
return json.load(f) return json.load(f)
def _write_state(prefix, venv, state): def _write_state(prefix: Prefix, venv: str, state: object) -> None:
state_filename = _state_filename(prefix, venv) state_filename = _state_filename(prefix, venv)
staging = state_filename + 'staging' staging = state_filename + 'staging'
with io.open(staging, 'w') as state_file: with open(staging, 'w') as state_file:
state_file.write(five.to_text(json.dumps(state))) state_file.write(five.to_text(json.dumps(state)))
# Move the file into place atomically to indicate we've installed # Move the file into place atomically to indicate we've installed
os.rename(staging, state_filename) os.rename(staging, state_filename)
@ -52,15 +57,36 @@ def _write_state(prefix, venv, state):
_KEYS = tuple(item.key for item in MANIFEST_HOOK_DICT.items) _KEYS = tuple(item.key for item in MANIFEST_HOOK_DICT.items)
class Hook(collections.namedtuple('Hook', ('src', 'prefix') + _KEYS)): class Hook(NamedTuple):
__slots__ = () src: str
prefix: Prefix
id: str
name: str
entry: str
language: str
alias: str
files: str
exclude: str
types: Sequence[str]
exclude_types: Sequence[str]
additional_dependencies: Sequence[str]
args: Sequence[str]
always_run: bool
pass_filenames: bool
description: str
language_version: str
log_file: str
minimum_pre_commit_version: str
require_serial: bool
stages: Sequence[str]
verbose: bool
@property @property
def cmd(self): def cmd(self) -> Tuple[str, ...]:
return tuple(shlex.split(self.entry)) + tuple(self.args) return tuple(shlex.split(self.entry)) + tuple(self.args)
@property @property
def install_key(self): def install_key(self) -> Tuple[Prefix, str, str, Tuple[str, ...]]:
return ( return (
self.prefix, self.prefix,
self.language, self.language,
@ -68,7 +94,7 @@ class Hook(collections.namedtuple('Hook', ('src', 'prefix') + _KEYS)):
tuple(self.additional_dependencies), tuple(self.additional_dependencies),
) )
def installed(self): def installed(self) -> bool:
lang = languages[self.language] lang = languages[self.language]
venv = environment_dir(lang.ENVIRONMENT_DIR, self.language_version) venv = environment_dir(lang.ENVIRONMENT_DIR, self.language_version)
return ( return (
@ -81,12 +107,13 @@ class Hook(collections.namedtuple('Hook', ('src', 'prefix') + _KEYS)):
) )
) )
def install(self): def install(self) -> None:
logger.info('Installing environment for {}.'.format(self.src)) logger.info(f'Installing environment for {self.src}.')
logger.info('Once installed this environment will be reused.') logger.info('Once installed this environment will be reused.')
logger.info('This may take a few minutes...') logger.info('This may take a few minutes...')
lang = languages[self.language] lang = languages[self.language]
assert lang.ENVIRONMENT_DIR is not None
venv = environment_dir(lang.ENVIRONMENT_DIR, self.language_version) venv = environment_dir(lang.ENVIRONMENT_DIR, self.language_version)
# There's potentially incomplete cleanup from previous runs # There's potentially incomplete cleanup from previous runs
@ -100,12 +127,12 @@ class Hook(collections.namedtuple('Hook', ('src', 'prefix') + _KEYS)):
# Write our state to indicate we're installed # Write our state to indicate we're installed
_write_state(self.prefix, venv, _state(self.additional_dependencies)) _write_state(self.prefix, venv, _state(self.additional_dependencies))
def run(self, file_args, color): def run(self, file_args: Sequence[str], color: bool) -> Tuple[int, bytes]:
lang = languages[self.language] lang = languages[self.language]
return lang.run_hook(self, file_args, color) return lang.run_hook(self, file_args, color)
@classmethod @classmethod
def create(cls, src, prefix, dct): def create(cls, src: str, prefix: Prefix, dct: Dict[str, Any]) -> 'Hook':
# TODO: have cfgv do this (?) # TODO: have cfgv do this (?)
extra_keys = set(dct) - set(_KEYS) extra_keys = set(dct) - set(_KEYS)
if extra_keys: if extra_keys:
@ -116,9 +143,10 @@ class Hook(collections.namedtuple('Hook', ('src', 'prefix') + _KEYS)):
return cls(src=src, prefix=prefix, **{k: dct[k] for k in _KEYS}) return cls(src=src, prefix=prefix, **{k: dct[k] for k in _KEYS})
def _hook(*hook_dicts, **kwargs): def _hook(
root_config = kwargs.pop('root_config') *hook_dicts: Dict[str, Any],
assert not kwargs, kwargs root_config: Dict[str, Any],
) -> Dict[str, Any]:
ret, rest = dict(hook_dicts[0]), hook_dicts[1:] ret, rest = dict(hook_dicts[0]), hook_dicts[1:]
for dct in rest: for dct in rest:
ret.update(dct) ret.update(dct)
@ -146,8 +174,12 @@ def _hook(*hook_dicts, **kwargs):
return ret return ret
def _non_cloned_repository_hooks(repo_config, store, root_config): def _non_cloned_repository_hooks(
def _prefix(language_name, deps): repo_config: Dict[str, Any],
store: Store,
root_config: Dict[str, Any],
) -> Tuple[Hook, ...]:
def _prefix(language_name: str, deps: Sequence[str]) -> Prefix:
language = languages[language_name] language = languages[language_name]
# pygrep / script / system / docker_image do not have # pygrep / script / system / docker_image do not have
# environments so they work out of the current directory # environments so they work out of the current directory
@ -166,7 +198,11 @@ def _non_cloned_repository_hooks(repo_config, store, root_config):
) )
def _cloned_repository_hooks(repo_config, store, root_config): def _cloned_repository_hooks(
repo_config: Dict[str, Any],
store: Store,
root_config: Dict[str, Any],
) -> Tuple[Hook, ...]:
repo, rev = repo_config['repo'], repo_config['rev'] repo, rev = repo_config['repo'], repo_config['rev']
manifest_path = os.path.join(store.clone(repo, rev), C.MANIFEST_FILE) manifest_path = os.path.join(store.clone(repo, rev), C.MANIFEST_FILE)
by_id = {hook['id']: hook for hook in load_manifest(manifest_path)} by_id = {hook['id']: hook for hook in load_manifest(manifest_path)}
@ -195,16 +231,20 @@ def _cloned_repository_hooks(repo_config, store, root_config):
) )
def _repository_hooks(repo_config, store, root_config): def _repository_hooks(
repo_config: Dict[str, Any],
store: Store,
root_config: Dict[str, Any],
) -> Tuple[Hook, ...]:
if repo_config['repo'] in {LOCAL, META}: if repo_config['repo'] in {LOCAL, META}:
return _non_cloned_repository_hooks(repo_config, store, root_config) return _non_cloned_repository_hooks(repo_config, store, root_config)
else: else:
return _cloned_repository_hooks(repo_config, store, root_config) return _cloned_repository_hooks(repo_config, store, root_config)
def install_hook_envs(hooks, store): def install_hook_envs(hooks: Sequence[Hook], store: Store) -> None:
def _need_installed(): def _need_installed() -> List[Hook]:
seen = set() seen: Set[Tuple[Prefix, str, str, Tuple[str, ...]]] = set()
ret = [] ret = []
for hook in hooks: for hook in hooks:
if hook.install_key not in seen and not hook.installed(): if hook.install_key not in seen and not hook.installed():
@ -220,7 +260,7 @@ def install_hook_envs(hooks, store):
hook.install() hook.install()
def all_hooks(root_config, store): def all_hooks(root_config: Dict[str, Any], store: Store) -> Tuple[Hook, ...]:
return tuple( return tuple(
hook hook
for repo in root_config['repos'] for repo in root_config['repos']
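
The namedtuple-to-typing.NamedTuple move keeps tuple semantics while giving every field a type; a cut-down standalone version of the pattern (fields invented):

    import shlex
    from typing import NamedTuple
    from typing import Tuple

    class MiniHook(NamedTuple):
        id: str
        entry: str
        args: Tuple[str, ...] = ()

        @property
        def cmd(self) -> Tuple[str, ...]:
            return tuple(shlex.split(self.entry)) + tuple(self.args)

    hook = MiniHook(id='flake8', entry='flake8 --max-complexity=10')
    hook.cmd                              # ('flake8', '--max-complexity=10')
    hook._replace(args=('--extra',)).cmd  # tuple helpers such as _replace still work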

View file

@ -1,11 +1,12 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
"""File generated by pre-commit: https://pre-commit.com""" """File generated by pre-commit: https://pre-commit.com"""
from __future__ import print_function
import distutils.spawn import distutils.spawn
import os import os
import subprocess import subprocess
import sys import sys
from typing import Callable
from typing import Dict
from typing import Tuple
# work around https://github.com/Homebrew/homebrew-core/issues/30445 # work around https://github.com/Homebrew/homebrew-core/issues/30445
os.environ.pop('__PYVENV_LAUNCHER__', None) os.environ.pop('__PYVENV_LAUNCHER__', None)
@ -14,10 +15,10 @@ HERE = os.path.dirname(os.path.abspath(__file__))
Z40 = '0' * 40 Z40 = '0' * 40
ID_HASH = '138fd403232d2ddd5efb44317e38bf03' ID_HASH = '138fd403232d2ddd5efb44317e38bf03'
# start templated # start templated
CONFIG = None CONFIG = ''
HOOK_TYPE = None HOOK_TYPE = ''
INSTALL_PYTHON = None INSTALL_PYTHON = ''
SKIP_ON_MISSING_CONFIG = None SKIP_ON_MISSING_CONFIG = False
# end templated # end templated
@ -29,7 +30,7 @@ class FatalError(RuntimeError):
pass pass
def _norm_exe(exe): def _norm_exe(exe: str) -> Tuple[str, ...]:
"""Necessary for shebang support on windows. """Necessary for shebang support on windows.
roughly lifted from `identify.identify.parse_shebang` roughly lifted from `identify.identify.parse_shebang`
@ -48,7 +49,7 @@ def _norm_exe(exe):
return tuple(cmd) return tuple(cmd)
def _run_legacy(): def _run_legacy() -> Tuple[int, bytes]:
if __file__.endswith('.legacy'): if __file__.endswith('.legacy'):
raise SystemExit( raise SystemExit(
"bug: pre-commit's script is installed in migration mode\n" "bug: pre-commit's script is installed in migration mode\n"
@ -60,11 +61,11 @@ def _run_legacy():
) )
if HOOK_TYPE == 'pre-push': if HOOK_TYPE == 'pre-push':
stdin = getattr(sys.stdin, 'buffer', sys.stdin).read() stdin = sys.stdin.buffer.read()
else: else:
stdin = None stdin = b''
legacy_hook = os.path.join(HERE, '{}.legacy'.format(HOOK_TYPE)) legacy_hook = os.path.join(HERE, f'{HOOK_TYPE}.legacy')
if os.access(legacy_hook, os.X_OK): if os.access(legacy_hook, os.X_OK):
cmd = _norm_exe(legacy_hook) + (legacy_hook,) + tuple(sys.argv[1:]) cmd = _norm_exe(legacy_hook) + (legacy_hook,) + tuple(sys.argv[1:])
proc = subprocess.Popen(cmd, stdin=subprocess.PIPE if stdin else None) proc = subprocess.Popen(cmd, stdin=subprocess.PIPE if stdin else None)
@ -74,7 +75,7 @@ def _run_legacy():
return 0, stdin return 0, stdin
def _validate_config(): def _validate_config() -> None:
cmd = ('git', 'rev-parse', '--show-toplevel') cmd = ('git', 'rev-parse', '--show-toplevel')
top_level = subprocess.check_output(cmd).decode('UTF-8').strip() top_level = subprocess.check_output(cmd).decode('UTF-8').strip()
cfg = os.path.join(top_level, CONFIG) cfg = os.path.join(top_level, CONFIG)
@ -98,7 +99,7 @@ def _validate_config():
) )
def _exe(): def _exe() -> Tuple[str, ...]:
with open(os.devnull, 'wb') as devnull: with open(os.devnull, 'wb') as devnull:
for exe in (INSTALL_PYTHON, sys.executable): for exe in (INSTALL_PYTHON, sys.executable):
try: try:
@ -118,14 +119,14 @@ def _exe():
) )
def _rev_exists(rev): def _rev_exists(rev: str) -> bool:
return not subprocess.call(('git', 'rev-list', '--quiet', rev)) return not subprocess.call(('git', 'rev-list', '--quiet', rev))
def _pre_push(stdin): def _pre_push(stdin: bytes) -> Tuple[str, ...]:
remote = sys.argv[1] remote = sys.argv[1]
opts = () opts: Tuple[str, ...] = ()
for line in stdin.decode('UTF-8').splitlines(): for line in stdin.decode('UTF-8').splitlines():
_, local_sha, _, remote_sha = line.split() _, local_sha, _, remote_sha = line.split()
if local_sha == Z40: if local_sha == Z40:
@ -136,7 +137,7 @@ def _pre_push(stdin):
# ancestors not found in remote # ancestors not found in remote
ancestors = subprocess.check_output(( ancestors = subprocess.check_output((
'git', 'rev-list', local_sha, '--topo-order', '--reverse', 'git', 'rev-list', local_sha, '--topo-order', '--reverse',
'--not', '--remotes={}'.format(remote), '--not', f'--remotes={remote}',
)).decode().strip() )).decode().strip()
if not ancestors: if not ancestors:
continue continue
@ -148,8 +149,8 @@ def _pre_push(stdin):
# pushing the whole tree including root commit # pushing the whole tree including root commit
opts = ('--all-files',) opts = ('--all-files',)
else: else:
cmd = ('git', 'rev-parse', '{}^'.format(first_ancestor)) rev_cmd = ('git', 'rev-parse', f'{first_ancestor}^')
source = subprocess.check_output(cmd).decode().strip() source = subprocess.check_output(rev_cmd).decode().strip()
opts = ('--origin', local_sha, '--source', source) opts = ('--origin', local_sha, '--source', source)
if opts: if opts:
@ -159,8 +160,8 @@ def _pre_push(stdin):
raise EarlyExit() raise EarlyExit()
def _opts(stdin): def _opts(stdin: bytes) -> Tuple[str, ...]:
fns = { fns: Dict[str, Callable[[bytes], Tuple[str, ...]]] = {
'prepare-commit-msg': lambda _: ('--commit-msg-filename', sys.argv[1]), 'prepare-commit-msg': lambda _: ('--commit-msg-filename', sys.argv[1]),
'commit-msg': lambda _: ('--commit-msg-filename', sys.argv[1]), 'commit-msg': lambda _: ('--commit-msg-filename', sys.argv[1]),
'pre-merge-commit': lambda _: (), 'pre-merge-commit': lambda _: (),
@ -172,13 +173,14 @@ def _opts(stdin):
if sys.version_info < (3, 7): # https://bugs.python.org/issue25942 if sys.version_info < (3, 7): # https://bugs.python.org/issue25942
def _subprocess_call(cmd): # this is the python 2.7 implementation # this is the python 2.7 implementation
def _subprocess_call(cmd: Tuple[str, ...]) -> int:
return subprocess.Popen(cmd).wait() return subprocess.Popen(cmd).wait()
else: else:
_subprocess_call = subprocess.call _subprocess_call = subprocess.call
def main(): def main() -> int:
retv, stdin = _run_legacy() retv, stdin = _run_legacy()
try: try:
_validate_config() _validate_config()
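
The stdin that _pre_push reads is git's standard pre-push format, one line per ref being pushed: `<local ref> <local sha> <remote ref> <remote sha>`. A made-up line, parsed the same way the hook does:

    Z40 = '0' * 40
    line = f'refs/heads/main {"1" * 40} refs/heads/main {Z40}'
    _, local_sha, _, remote_sha = line.split()
    # remote_sha == Z40: the branch is new on the remote, so the hook has to
    # work out which local commits the remote lacks before choosing
    # --origin/--source (or --all-files for a brand-new repository)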

View file

@ -1,10 +1,8 @@
from __future__ import unicode_literals
import contextlib import contextlib
import io
import logging import logging
import os.path import os.path
import time import time
from typing import Generator
from pre_commit import git from pre_commit import git
from pre_commit.util import CalledProcessError from pre_commit.util import CalledProcessError
@ -17,7 +15,7 @@ from pre_commit.xargs import xargs
logger = logging.getLogger('pre_commit') logger = logging.getLogger('pre_commit')
def _git_apply(patch): def _git_apply(patch: str) -> None:
args = ('apply', '--whitespace=nowarn', patch) args = ('apply', '--whitespace=nowarn', patch)
try: try:
cmd_output_b('git', *args) cmd_output_b('git', *args)
@ -27,7 +25,7 @@ def _git_apply(patch):
@contextlib.contextmanager @contextlib.contextmanager
def _intent_to_add_cleared(): def _intent_to_add_cleared() -> Generator[None, None, None]:
intent_to_add = git.intent_to_add_files() intent_to_add = git.intent_to_add_files()
if intent_to_add: if intent_to_add:
logger.warning('Unstaged intent-to-add files detected.') logger.warning('Unstaged intent-to-add files detected.')
@ -42,7 +40,7 @@ def _intent_to_add_cleared():
@contextlib.contextmanager @contextlib.contextmanager
def _unstaged_changes_cleared(patch_dir): def _unstaged_changes_cleared(patch_dir: str) -> Generator[None, None, None]:
tree = cmd_output('git', 'write-tree')[1].strip() tree = cmd_output('git', 'write-tree')[1].strip()
retcode, diff_stdout_binary, _ = cmd_output_b( retcode, diff_stdout_binary, _ = cmd_output_b(
'git', 'diff-index', '--ignore-submodules', '--binary', 'git', 'diff-index', '--ignore-submodules', '--binary',
@ -54,11 +52,11 @@ def _unstaged_changes_cleared(patch_dir):
patch_filename = os.path.join(patch_dir, patch_filename) patch_filename = os.path.join(patch_dir, patch_filename)
logger.warning('Unstaged files detected.') logger.warning('Unstaged files detected.')
logger.info( logger.info(
'Stashing unstaged files to {}.'.format(patch_filename), f'Stashing unstaged files to {patch_filename}.',
) )
# Save the current unstaged changes as a patch # Save the current unstaged changes as a patch
mkdirp(patch_dir) mkdirp(patch_dir)
with io.open(patch_filename, 'wb') as patch_file: with open(patch_filename, 'wb') as patch_file:
patch_file.write(diff_stdout_binary) patch_file.write(diff_stdout_binary)
# Clear the working directory of unstaged changes # Clear the working directory of unstaged changes
@ -79,7 +77,7 @@ def _unstaged_changes_cleared(patch_dir):
# Roll back the changes made by hooks. # Roll back the changes made by hooks.
cmd_output_b('git', 'checkout', '--', '.') cmd_output_b('git', 'checkout', '--', '.')
_git_apply(patch_filename) _git_apply(patch_filename)
logger.info('Restored changes from {}.'.format(patch_filename)) logger.info(f'Restored changes from {patch_filename}.')
else: else:
# There weren't any staged files so we don't need to do anything # There weren't any staged files so we don't need to do anything
# special # special
@ -87,7 +85,7 @@ def _unstaged_changes_cleared(patch_dir):
@contextlib.contextmanager @contextlib.contextmanager
def staged_files_only(patch_dir): def staged_files_only(patch_dir: str) -> Generator[None, None, None]:
"""Clear any unstaged changes from the git working directory inside this """Clear any unstaged changes from the git working directory inside this
context. context.
""" """

View file

@ -1,11 +1,14 @@
from __future__ import unicode_literals
import contextlib import contextlib
import io
import logging import logging
import os.path import os.path
import sqlite3 import sqlite3
import tempfile import tempfile
from typing import Callable
from typing import Generator
from typing import List
from typing import Optional
from typing import Sequence
from typing import Tuple
import pre_commit.constants as C import pre_commit.constants as C
from pre_commit import file_lock from pre_commit import file_lock
@ -21,7 +24,7 @@ from pre_commit.util import rmtree
logger = logging.getLogger('pre_commit') logger = logging.getLogger('pre_commit')
def _get_default_directory(): def _get_default_directory() -> str:
"""Returns the default directory for the Store. This is intentionally """Returns the default directory for the Store. This is intentionally
underscored to indicate that `Store.get_default_directory` is the intended underscored to indicate that `Store.get_default_directory` is the intended
way to get this information. This is also done so way to get this information. This is also done so
@ -34,16 +37,16 @@ def _get_default_directory():
) )
class Store(object): class Store:
get_default_directory = staticmethod(_get_default_directory) get_default_directory = staticmethod(_get_default_directory)
def __init__(self, directory=None): def __init__(self, directory: Optional[str] = None) -> None:
self.directory = directory or Store.get_default_directory() self.directory = directory or Store.get_default_directory()
self.db_path = os.path.join(self.directory, 'db.db') self.db_path = os.path.join(self.directory, 'db.db')
if not os.path.exists(self.directory): if not os.path.exists(self.directory):
mkdirp(self.directory) mkdirp(self.directory)
with io.open(os.path.join(self.directory, 'README'), 'w') as f: with open(os.path.join(self.directory, 'README'), 'w') as f:
f.write( f.write(
'This directory is maintained by the pre-commit project.\n' 'This directory is maintained by the pre-commit project.\n'
'Learn more: https://github.com/pre-commit/pre-commit\n', 'Learn more: https://github.com/pre-commit/pre-commit\n',
@ -69,21 +72,24 @@ class Store(object):
' PRIMARY KEY (repo, ref)' ' PRIMARY KEY (repo, ref)'
');', ');',
) )
self._create_config_table_if_not_exists(db) self._create_config_table(db)
# Atomic file move # Atomic file move
os.rename(tmpfile, self.db_path) os.rename(tmpfile, self.db_path)
@contextlib.contextmanager @contextlib.contextmanager
def exclusive_lock(self): def exclusive_lock(self) -> Generator[None, None, None]:
def blocked_cb(): # pragma: no cover (tests are single-process) def blocked_cb() -> None: # pragma: no cover (tests are in-process)
logger.info('Locking pre-commit directory') logger.info('Locking pre-commit directory')
with file_lock.lock(os.path.join(self.directory, '.lock'), blocked_cb): with file_lock.lock(os.path.join(self.directory, '.lock'), blocked_cb):
yield yield
@contextlib.contextmanager @contextlib.contextmanager
def connect(self, db_path=None): def connect(
self,
db_path: Optional[str] = None,
) -> Generator[sqlite3.Connection, None, None]:
db_path = db_path or self.db_path db_path = db_path or self.db_path
# sqlite doesn't close its fd with its contextmanager >.< # sqlite doesn't close its fd with its contextmanager >.<
# contextlib.closing fixes this. # contextlib.closing fixes this.
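
That comment points at a real sqlite3 quirk: a connection used as a context manager only wraps a transaction, it never closes the handle, so contextlib.closing does the closing. In isolation (table name invented):

    import contextlib
    import sqlite3

    with contextlib.closing(sqlite3.connect('example.db')) as db:
        with db:  # commits on success, rolls back on error -- but does not close
            db.execute('CREATE TABLE IF NOT EXISTS example (path TEXT NOT NULL)')
    # the file descriptor is released here, thanks to contextlib.closing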
@ -94,24 +100,29 @@ class Store(object):
yield db yield db
@classmethod @classmethod
def db_repo_name(cls, repo, deps): def db_repo_name(cls, repo: str, deps: Sequence[str]) -> str:
if deps: if deps:
return '{}:{}'.format(repo, ','.join(sorted(deps))) return '{}:{}'.format(repo, ','.join(sorted(deps)))
else: else:
return repo return repo
def _new_repo(self, repo, ref, deps, make_strategy): def _new_repo(
self,
repo: str,
ref: str,
deps: Sequence[str],
make_strategy: Callable[[str], None],
) -> str:
repo = self.db_repo_name(repo, deps) repo = self.db_repo_name(repo, deps)
def _get_result(): def _get_result() -> Optional[str]:
# Check if we already exist # Check if we already exist
with self.connect() as db: with self.connect() as db:
result = db.execute( result = db.execute(
'SELECT path FROM repos WHERE repo = ? AND ref = ?', 'SELECT path FROM repos WHERE repo = ? AND ref = ?',
(repo, ref), (repo, ref),
).fetchone() ).fetchone()
if result: return result[0] if result else None
return result[0]
result = _get_result() result = _get_result()
if result: if result:
@ -122,7 +133,7 @@ class Store(object):
if result: # pragma: no cover (race) if result: # pragma: no cover (race)
return result return result
logger.info('Initializing environment for {}.'.format(repo)) logger.info(f'Initializing environment for {repo}.')
directory = tempfile.mkdtemp(prefix='repo', dir=self.directory) directory = tempfile.mkdtemp(prefix='repo', dir=self.directory)
with clean_path_on_failure(directory): with clean_path_on_failure(directory):
@ -136,14 +147,14 @@ class Store(object):
) )
return directory return directory
def _complete_clone(self, ref, git_cmd): def _complete_clone(self, ref: str, git_cmd: Callable[..., None]) -> None:
"""Perform a complete clone of a repository and its submodules """ """Perform a complete clone of a repository and its submodules """
git_cmd('fetch', 'origin', '--tags') git_cmd('fetch', 'origin', '--tags')
git_cmd('checkout', ref) git_cmd('checkout', ref)
git_cmd('submodule', 'update', '--init', '--recursive') git_cmd('submodule', 'update', '--init', '--recursive')
def _shallow_clone(self, ref, git_cmd): def _shallow_clone(self, ref: str, git_cmd: Callable[..., None]) -> None:
"""Perform a shallow clone of a repository and its submodules """ """Perform a shallow clone of a repository and its submodules """
git_config = 'protocol.version=2' git_config = 'protocol.version=2'
@ -154,14 +165,14 @@ class Store(object):
'--depth=1', '--depth=1',
) )
def clone(self, repo, ref, deps=()): def clone(self, repo: str, ref: str, deps: Sequence[str] = ()) -> str:
"""Clone the given url and checkout the specific ref.""" """Clone the given url and checkout the specific ref."""
def clone_strategy(directory): def clone_strategy(directory: str) -> None:
git.init_repo(directory, repo) git.init_repo(directory, repo)
env = git.no_git_env() env = git.no_git_env()
def _git_cmd(*args): def _git_cmd(*args: str) -> None:
cmd_output_b('git', *args, cwd=directory, env=env) cmd_output_b('git', *args, cwd=directory, env=env)
try: try:
@ -176,17 +187,17 @@ class Store(object):
'pre_commit_dummy_package.gemspec', 'setup.py', 'environment.yml', 'pre_commit_dummy_package.gemspec', 'setup.py', 'environment.yml',
) )
def make_local(self, deps): def make_local(self, deps: Sequence[str]) -> str:
def make_local_strategy(directory): def make_local_strategy(directory: str) -> None:
for resource in self.LOCAL_RESOURCES: for resource in self.LOCAL_RESOURCES:
contents = resource_text('empty_template_{}'.format(resource)) contents = resource_text(f'empty_template_{resource}')
with io.open(os.path.join(directory, resource), 'w') as f: with open(os.path.join(directory, resource), 'w') as f:
f.write(contents) f.write(contents)
env = git.no_git_env() env = git.no_git_env()
# initialize the git repository so it looks more like cloned repos # initialize the git repository so it looks more like cloned repos
def _git_cmd(*args): def _git_cmd(*args: str) -> None:
cmd_output_b('git', *args, cwd=directory, env=env) cmd_output_b('git', *args, cwd=directory, env=env)
git.init_repo(directory, '<<unknown>>') git.init_repo(directory, '<<unknown>>')
@ -197,7 +208,7 @@ class Store(object):
'local', C.LOCAL_REPO_VERSION, deps, make_local_strategy, 'local', C.LOCAL_REPO_VERSION, deps, make_local_strategy,
) )
def _create_config_table_if_not_exists(self, db): def _create_config_table(self, db: sqlite3.Connection) -> None:
db.executescript( db.executescript(
'CREATE TABLE IF NOT EXISTS configs (' 'CREATE TABLE IF NOT EXISTS configs ('
' path TEXT NOT NULL,' ' path TEXT NOT NULL,'
@ -205,32 +216,32 @@ class Store(object):
');', ');',
) )
def mark_config_used(self, path): def mark_config_used(self, path: str) -> None:
path = os.path.realpath(path) path = os.path.realpath(path)
# don't insert config files that do not exist # don't insert config files that do not exist
if not os.path.exists(path): if not os.path.exists(path):
return return
with self.connect() as db: with self.connect() as db:
# TODO: eventually remove this and only create in _create # TODO: eventually remove this and only create in _create
self._create_config_table_if_not_exists(db) self._create_config_table(db)
db.execute('INSERT OR IGNORE INTO configs VALUES (?)', (path,)) db.execute('INSERT OR IGNORE INTO configs VALUES (?)', (path,))
def select_all_configs(self): def select_all_configs(self) -> List[str]:
with self.connect() as db: with self.connect() as db:
self._create_config_table_if_not_exists(db) self._create_config_table(db)
rows = db.execute('SELECT path FROM configs').fetchall() rows = db.execute('SELECT path FROM configs').fetchall()
return [path for path, in rows] return [path for path, in rows]
def delete_configs(self, configs): def delete_configs(self, configs: List[str]) -> None:
with self.connect() as db: with self.connect() as db:
rows = [(path,) for path in configs] rows = [(path,) for path in configs]
db.executemany('DELETE FROM configs WHERE path = ?', rows) db.executemany('DELETE FROM configs WHERE path = ?', rows)
def select_all_repos(self): def select_all_repos(self) -> List[Tuple[str, str, str]]:
with self.connect() as db: with self.connect() as db:
return db.execute('SELECT repo, ref, path from repos').fetchall() return db.execute('SELECT repo, ref, path from repos').fetchall()
def delete_repo(self, db_repo_name, ref, path): def delete_repo(self, db_repo_name: str, ref: str, path: str) -> None:
with self.connect() as db: with self.connect() as db:
db.execute( db.execute(
'DELETE FROM repos WHERE repo = ? and ref = ?', 'DELETE FROM repos WHERE repo = ? and ref = ?',

View file

@ -1,5 +1,3 @@
from __future__ import unicode_literals
import contextlib import contextlib
import errno import errno
import os.path import os.path
@ -8,8 +6,16 @@ import stat
import subprocess import subprocess
import sys import sys
import tempfile import tempfile
from types import TracebackType
import six from typing import Any
from typing import Callable
from typing import Dict
from typing import Generator
from typing import IO
from typing import Optional
from typing import Tuple
from typing import Type
from typing import Union
from pre_commit import five from pre_commit import five
from pre_commit import parse_shebang from pre_commit import parse_shebang
@ -21,8 +27,10 @@ else: # pragma: no cover (<PY37)
from importlib_resources import open_binary from importlib_resources import open_binary
from importlib_resources import read_text from importlib_resources import read_text
EnvironT = Union[Dict[str, str], 'os._Environ']
def mkdirp(path):
def mkdirp(path: str) -> None:
try: try:
os.makedirs(path) os.makedirs(path)
except OSError: except OSError:
@ -31,7 +39,7 @@ def mkdirp(path):
@contextlib.contextmanager @contextlib.contextmanager
def clean_path_on_failure(path): def clean_path_on_failure(path: str) -> Generator[None, None, None]:
"""Cleans up the directory on an exceptional failure.""" """Cleans up the directory on an exceptional failure."""
try: try:
yield yield
@ -42,12 +50,12 @@ def clean_path_on_failure(path):
@contextlib.contextmanager @contextlib.contextmanager
def noop_context(): def noop_context() -> Generator[None, None, None]:
yield yield
@contextlib.contextmanager @contextlib.contextmanager
def tmpdir(): def tmpdir() -> Generator[str, None, None]:
"""Contextmanager to create a temporary directory. It will be cleaned up """Contextmanager to create a temporary directory. It will be cleaned up
afterwards. afterwards.
""" """
@ -58,15 +66,15 @@ def tmpdir():
rmtree(tempdir) rmtree(tempdir)
def resource_bytesio(filename): def resource_bytesio(filename: str) -> IO[bytes]:
return open_binary('pre_commit.resources', filename) return open_binary('pre_commit.resources', filename)
def resource_text(filename): def resource_text(filename: str) -> str:
return read_text('pre_commit.resources', filename) return read_text('pre_commit.resources', filename)
def make_executable(filename): def make_executable(filename: str) -> None:
original_mode = os.stat(filename).st_mode original_mode = os.stat(filename).st_mode
os.chmod( os.chmod(
filename, original_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH, filename, original_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH,
@ -74,18 +82,23 @@ def make_executable(filename):
class CalledProcessError(RuntimeError): class CalledProcessError(RuntimeError):
def __init__(self, returncode, cmd, expected_returncode, stdout, stderr): def __init__(
super(CalledProcessError, self).__init__( self,
returncode, cmd, expected_returncode, stdout, stderr, returncode: int,
) cmd: Tuple[str, ...],
expected_returncode: int,
stdout: bytes,
stderr: Optional[bytes],
) -> None:
super().__init__(returncode, cmd, expected_returncode, stdout, stderr)
self.returncode = returncode self.returncode = returncode
self.cmd = cmd self.cmd = cmd
self.expected_returncode = expected_returncode self.expected_returncode = expected_returncode
self.stdout = stdout self.stdout = stdout
self.stderr = stderr self.stderr = stderr
def to_bytes(self): def __bytes__(self) -> bytes:
def _indent_or_none(part): def _indent_or_none(part: Optional[bytes]) -> bytes:
if part: if part:
return b'\n ' + part.replace(b'\n', b'\n ') return b'\n ' + part.replace(b'\n', b'\n ')
else: else:
@ -101,18 +114,14 @@ class CalledProcessError(RuntimeError):
b'stderr:', _indent_or_none(self.stderr), b'stderr:', _indent_or_none(self.stderr),
)) ))
def to_text(self): def __str__(self) -> str:
return self.to_bytes().decode('UTF-8') return self.__bytes__().decode('UTF-8')
if six.PY2: # pragma: no cover (py2)
__str__ = to_bytes
__unicode__ = to_text
else: # pragma: no cover (py3)
__bytes__ = to_bytes
__str__ = to_text
def _cmd_kwargs(*cmd, **kwargs): def _cmd_kwargs(
*cmd: str,
**kwargs: Any,
) -> Tuple[Tuple[str, ...], Dict[str, Any]]:
# py2/py3 on windows are more strict about the types here # py2/py3 on windows are more strict about the types here
cmd = tuple(five.n(arg) for arg in cmd) cmd = tuple(five.n(arg) for arg in cmd)
kwargs['env'] = { kwargs['env'] = {
@ -124,7 +133,10 @@ def _cmd_kwargs(*cmd, **kwargs):
return cmd, kwargs return cmd, kwargs
def cmd_output_b(*cmd, **kwargs): def cmd_output_b(
*cmd: str,
**kwargs: Any,
) -> Tuple[int, bytes, Optional[bytes]]:
retcode = kwargs.pop('retcode', 0) retcode = kwargs.pop('retcode', 0)
cmd, kwargs = _cmd_kwargs(*cmd, **kwargs) cmd, kwargs = _cmd_kwargs(*cmd, **kwargs)
@ -143,7 +155,7 @@ def cmd_output_b(*cmd, **kwargs):
return returncode, stdout_b, stderr_b return returncode, stdout_b, stderr_b
def cmd_output(*cmd, **kwargs): def cmd_output(*cmd: str, **kwargs: Any) -> Tuple[int, str, Optional[str]]:
returncode, stdout_b, stderr_b = cmd_output_b(*cmd, **kwargs) returncode, stdout_b, stderr_b = cmd_output_b(*cmd, **kwargs)
stdout = stdout_b.decode('UTF-8') if stdout_b is not None else None stdout = stdout_b.decode('UTF-8') if stdout_b is not None else None
stderr = stderr_b.decode('UTF-8') if stderr_b is not None else None stderr = stderr_b.decode('UTF-8') if stderr_b is not None else None
@ -154,35 +166,45 @@ if os.name != 'nt': # pragma: windows no cover
from os import openpty from os import openpty
import termios import termios
class Pty(object): class Pty:
def __init__(self): def __init__(self) -> None:
self.r = self.w = None self.r: Optional[int] = None
self.w: Optional[int] = None
def __enter__(self): def __enter__(self) -> 'Pty':
self.r, self.w = openpty() self.r, self.w = openpty()
# tty flags normally change \n to \r\n # tty flags normally change \n to \r\n
attrs = termios.tcgetattr(self.r) attrs = termios.tcgetattr(self.r)
assert isinstance(attrs[1], int)
attrs[1] &= ~(termios.ONLCR | termios.OPOST) attrs[1] &= ~(termios.ONLCR | termios.OPOST)
termios.tcsetattr(self.r, termios.TCSANOW, attrs) termios.tcsetattr(self.r, termios.TCSANOW, attrs)
return self return self
def close_w(self): def close_w(self) -> None:
if self.w is not None: if self.w is not None:
os.close(self.w) os.close(self.w)
self.w = None self.w = None
def close_r(self): def close_r(self) -> None:
assert self.r is not None assert self.r is not None
os.close(self.r) os.close(self.r)
self.r = None self.r = None
def __exit__(self, exc_type, exc_value, traceback): def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType],
) -> None:
self.close_w() self.close_w()
self.close_r() self.close_r()
def cmd_output_p(*cmd, **kwargs): def cmd_output_p(
*cmd: str,
**kwargs: Any,
) -> Tuple[int, bytes, Optional[bytes]]:
assert kwargs.pop('retcode') is None assert kwargs.pop('retcode') is None
assert kwargs['stderr'] == subprocess.STDOUT, kwargs['stderr'] assert kwargs['stderr'] == subprocess.STDOUT, kwargs['stderr']
cmd, kwargs = _cmd_kwargs(*cmd, **kwargs) cmd, kwargs = _cmd_kwargs(*cmd, **kwargs)
@ -193,6 +215,7 @@ if os.name != 'nt': # pragma: windows no cover
return e.to_output() return e.to_output()
with open(os.devnull) as devnull, Pty() as pty: with open(os.devnull) as devnull, Pty() as pty:
assert pty.r is not None
kwargs.update({'stdin': devnull, 'stdout': pty.w, 'stderr': pty.w}) kwargs.update({'stdin': devnull, 'stdout': pty.w, 'stderr': pty.w})
proc = subprocess.Popen(cmd, **kwargs) proc = subprocess.Popen(cmd, **kwargs)
pty.close_w() pty.close_w()
@ -216,9 +239,13 @@ else: # pragma: no cover
cmd_output_p = cmd_output_b cmd_output_p = cmd_output_b
def rmtree(path): def rmtree(path: str) -> None:
"""On windows, rmtree fails for readonly dirs.""" """On windows, rmtree fails for readonly dirs."""
def handle_remove_readonly(func, path, exc): def handle_remove_readonly(
func: Callable[..., Any],
path: str,
exc: Tuple[Type[OSError], OSError, TracebackType],
) -> None:
excvalue = exc[1] excvalue = exc[1]
if ( if (
func in (os.rmdir, os.remove, os.unlink) and func in (os.rmdir, os.remove, os.unlink) and
@ -232,6 +259,6 @@ def rmtree(path):
shutil.rmtree(path, ignore_errors=False, onerror=handle_remove_readonly) shutil.rmtree(path, ignore_errors=False, onerror=handle_remove_readonly)
def parse_version(s): def parse_version(s: str) -> Tuple[int, ...]:
"""poor man's version comparison""" """poor man's version comparison"""
return tuple(int(p) for p in s.split('.')) return tuple(int(p) for p in s.split('.'))
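
The "poor man's" comparison works because Python compares tuples element by element:

    parse_version('1.18.3') > parse_version('1.9.10')  # True: (1, 18, 3) > (1, 9, 10)
    parse_version('2.0') > parse_version('1.99.99')    # True: 2 > 1 decides it
    # it only understands purely numeric dotted versions; '1.0rc1' would raise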

View file

@ -1,22 +1,29 @@
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import concurrent.futures import concurrent.futures
import contextlib import contextlib
import math import math
import os import os
import subprocess import subprocess
import sys import sys
from typing import Any
import six from typing import Callable
from typing import Generator
from typing import Iterable
from typing import List
from typing import Optional
from typing import Sequence
from typing import Tuple
from typing import TypeVar
from pre_commit import parse_shebang from pre_commit import parse_shebang
from pre_commit.util import cmd_output_b from pre_commit.util import cmd_output_b
from pre_commit.util import cmd_output_p from pre_commit.util import cmd_output_p
from pre_commit.util import EnvironT
TArg = TypeVar('TArg')
TRet = TypeVar('TRet')
def _environ_size(_env=None): def _environ_size(_env: Optional[EnvironT] = None) -> int:
environ = _env if _env is not None else getattr(os, 'environb', os.environ) environ = _env if _env is not None else getattr(os, 'environb', os.environ)
size = 8 * len(environ) # number of pointers in `envp` size = 8 * len(environ) # number of pointers in `envp`
for k, v in environ.items(): for k, v in environ.items():
@ -24,9 +31,9 @@ def _environ_size(_env=None):
return size return size
def _get_platform_max_length(): # pragma: no cover (platform specific) def _get_platform_max_length() -> int: # pragma: no cover (platform specific)
if os.name == 'posix': if os.name == 'posix':
maximum = os.sysconf(str('SC_ARG_MAX')) - 2048 - _environ_size() maximum = os.sysconf('SC_ARG_MAX') - 2048 - _environ_size()
maximum = max(min(maximum, 2 ** 17), 2 ** 12) maximum = max(min(maximum, 2 ** 17), 2 ** 12)
return maximum return maximum
elif os.name == 'nt': elif os.name == 'nt':
@ -36,17 +43,14 @@ def _get_platform_max_length(): # pragma: no cover (platform specific)
return 2 ** 12 return 2 ** 12
def _command_length(*cmd): def _command_length(*cmd: str) -> int:
full_cmd = ' '.join(cmd) full_cmd = ' '.join(cmd)
# win32 uses the amount of characters, more details at: # win32 uses the amount of characters, more details at:
# https://github.com/pre-commit/pre-commit/pull/839 # https://github.com/pre-commit/pre-commit/pull/839
if sys.platform == 'win32': if sys.platform == 'win32':
# the python2.x apis require bytes, we encode as UTF-8 # the python2.x apis require bytes, we encode as UTF-8
if six.PY2: return len(full_cmd.encode('utf-16le')) // 2
return len(full_cmd.encode('utf-8'))
else:
return len(full_cmd.encode('utf-16le')) // 2
else: else:
return len(full_cmd.encode(sys.getfilesystemencoding())) return len(full_cmd.encode(sys.getfilesystemencoding()))
@ -55,7 +59,12 @@ class ArgumentTooLongError(RuntimeError):
pass pass
def partition(cmd, varargs, target_concurrency, _max_length=None): def partition(
cmd: Sequence[str],
varargs: Sequence[str],
target_concurrency: int,
_max_length: Optional[int] = None,
) -> Tuple[Tuple[str, ...], ...]:
_max_length = _max_length or _get_platform_max_length() _max_length = _max_length or _get_platform_max_length()
# Generally, we try to partition evenly into at least `target_concurrency` # Generally, we try to partition evenly into at least `target_concurrency`
@ -65,7 +74,7 @@ def partition(cmd, varargs, target_concurrency, _max_length=None):
cmd = tuple(cmd) cmd = tuple(cmd)
ret = [] ret = []
ret_cmd = [] ret_cmd: List[str] = []
# Reversed so arguments are in order # Reversed so arguments are in order
varargs = list(reversed(varargs)) varargs = list(reversed(varargs))
@ -95,7 +104,10 @@ def partition(cmd, varargs, target_concurrency, _max_length=None):
@contextlib.contextmanager @contextlib.contextmanager
def _thread_mapper(maxsize): def _thread_mapper(maxsize: int) -> Generator[
Callable[[Callable[[TArg], TRet], Iterable[TArg]], Iterable[TRet]],
None, None,
]:
if maxsize == 1: if maxsize == 1:
yield map yield map
else: else:
@ -103,7 +115,11 @@ def _thread_mapper(maxsize):
yield ex.map yield ex.map
def xargs(cmd, varargs, **kwargs): def xargs(
cmd: Tuple[str, ...],
varargs: Sequence[str],
**kwargs: Any,
) -> Tuple[int, bytes]:
"""A simplified implementation of xargs. """A simplified implementation of xargs.
color: Make a pty if on a platform that supports it color: Make a pty if on a platform that supports it
@ -123,9 +139,11 @@ def xargs(cmd, varargs, **kwargs):
partitions = partition(cmd, varargs, target_concurrency, max_length) partitions = partition(cmd, varargs, target_concurrency, max_length)
def run_cmd_partition(run_cmd): def run_cmd_partition(
run_cmd: Tuple[str, ...],
) -> Tuple[int, bytes, Optional[bytes]]:
return cmd_fn( return cmd_fn(
*run_cmd, retcode=None, stderr=subprocess.STDOUT, **kwargs *run_cmd, retcode=None, stderr=subprocess.STDOUT, **kwargs,
) )
threads = min(len(partitions), target_concurrency) threads = min(len(partitions), target_concurrency)
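
A hedged usage sketch of the helper above: hand it a command plus the file list and it splits the list into however many invocations fit under the platform's argv limit (hook and file names invented, defaults assumed for the keyword options):

    files = ('a.py', 'b.py', 'c.py')
    retcode, output = xargs(('flake8',), files)
    # output collects the bytes produced by each partitioned invocation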

View file

@ -1,6 +1,5 @@
-e . -e .
coverage coverage
mock
pytest pytest
pytest-env pytest-env

View file

@ -11,10 +11,8 @@ license = MIT
license_file = LICENSE license_file = LICENSE
classifiers = classifiers =
License :: OSI Approved :: MIT License License :: OSI Approved :: MIT License
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3 Programming Language :: Python :: 3
Programming Language :: Python :: 3.5 Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.8
@ -29,13 +27,11 @@ install_requires =
identify>=1.0.0 identify>=1.0.0
nodeenv>=0.11.1 nodeenv>=0.11.1
pyyaml pyyaml
six
toml toml
virtualenv>=15.2 virtualenv>=15.2
futures;python_version<"3.2"
importlib-metadata;python_version<"3.8" importlib-metadata;python_version<"3.8"
importlib-resources;python_version<"3.7" importlib-resources;python_version<"3.7"
python_requires = >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.* python_requires = >=3.6
[options.entry_points] [options.entry_points]
console_scripts = console_scripts =
@ -56,3 +52,16 @@ exclude =
[bdist_wheel] [bdist_wheel]
universal = True universal = True
[mypy]
check_untyped_defs = true
disallow_any_generics = true
disallow_incomplete_defs = true
disallow_untyped_defs = true
no_implicit_optional = true
[mypy-testing.*]
disallow_untyped_defs = false
[mypy-tests.*]
disallow_untyped_defs = false
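
With disallow_untyped_defs enabled for the package (and relaxed for testing/ and tests/), mypy rejects the first function below and accepts the second — a tiny illustration:

    def frobnicate(x):  # error: Function is missing a type annotation
        return x + 1

    def frobnicate_typed(x: int) -> int:
        return x + 1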

View file

@ -1,5 +1,3 @@
from __future__ import unicode_literals
import collections import collections

View file

@ -1,8 +1,4 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import contextlib import contextlib
import io
import os.path import os.path
import shutil import shutil
@ -58,10 +54,10 @@ def modify_manifest(path, commit=True):
.pre-commit-hooks.yaml. .pre-commit-hooks.yaml.
""" """
manifest_path = os.path.join(path, C.MANIFEST_FILE) manifest_path = os.path.join(path, C.MANIFEST_FILE)
with io.open(manifest_path) as f: with open(manifest_path) as f:
manifest = ordered_load(f.read()) manifest = ordered_load(f.read())
yield manifest yield manifest
with io.open(manifest_path, 'w') as manifest_file: with open(manifest_path, 'w') as manifest_file:
manifest_file.write(ordered_dump(manifest, **C.YAML_DUMP_KWARGS)) manifest_file.write(ordered_dump(manifest, **C.YAML_DUMP_KWARGS))
if commit: if commit:
git_commit(msg=modify_manifest.__name__, cwd=path) git_commit(msg=modify_manifest.__name__, cwd=path)
@ -73,10 +69,10 @@ def modify_config(path='.', commit=True):
.pre-commit-config.yaml .pre-commit-config.yaml
""" """
config_path = os.path.join(path, C.CONFIG_FILE) config_path = os.path.join(path, C.CONFIG_FILE)
with io.open(config_path) as f: with open(config_path) as f:
config = ordered_load(f.read()) config = ordered_load(f.read())
yield config yield config
with io.open(config_path, 'w', encoding='UTF-8') as config_file: with open(config_path, 'w', encoding='UTF-8') as config_file:
config_file.write(ordered_dump(config, **C.YAML_DUMP_KWARGS)) config_file.write(ordered_dump(config, **C.YAML_DUMP_KWARGS))
if commit: if commit:
git_commit(msg=modify_config.__name__, cwd=path) git_commit(msg=modify_config.__name__, cwd=path)
@ -101,7 +97,7 @@ def sample_meta_config():
def make_config_from_repo(repo_path, rev=None, hooks=None, check=True): def make_config_from_repo(repo_path, rev=None, hooks=None, check=True):
manifest = load_manifest(os.path.join(repo_path, C.MANIFEST_FILE)) manifest = load_manifest(os.path.join(repo_path, C.MANIFEST_FILE))
config = { config = {
'repo': 'file://{}'.format(repo_path), 'repo': f'file://{repo_path}',
'rev': rev or git.head_rev(repo_path), 'rev': rev or git.head_rev(repo_path),
'hooks': hooks or [{'id': hook['id']} for hook in manifest], 'hooks': hooks or [{'id': hook['id']} for hook in manifest],
} }
@ -117,7 +113,7 @@ def make_config_from_repo(repo_path, rev=None, hooks=None, check=True):
def read_config(directory, config_file=C.CONFIG_FILE): def read_config(directory, config_file=C.CONFIG_FILE):
config_path = os.path.join(directory, config_file) config_path = os.path.join(directory, config_file)
with io.open(config_path) as f: with open(config_path) as f:
config = ordered_load(f.read()) config = ordered_load(f.read())
return config return config
@ -126,7 +122,7 @@ def write_config(directory, config, config_file=C.CONFIG_FILE):
if type(config) is not list and 'repos' not in config: if type(config) is not list and 'repos' not in config:
assert isinstance(config, dict), config assert isinstance(config, dict), config
config = {'repos': [config]} config = {'repos': [config]}
with io.open(os.path.join(directory, config_file), 'w') as outfile: with open(os.path.join(directory, config_file), 'w') as outfile:
outfile.write(ordered_dump(config, **C.YAML_DUMP_KWARGS)) outfile.write(ordered_dump(config, **C.YAML_DUMP_KWARGS))

testing/gen-languages-all (new executable file, 27 lines)
View file

@ -0,0 +1,27 @@
#!/usr/bin/env python3
import sys
LANGUAGES = [
'conda', 'docker', 'docker_image', 'fail', 'golang', 'node', 'pygrep',
'python', 'python_venv', 'ruby', 'rust', 'script', 'swift', 'system',
]
FIELDS = [
'ENVIRONMENT_DIR', 'get_default_version', 'healthy', 'install_environment',
'run_hook',
]
def main() -> int:
print(f' # BEGIN GENERATED ({sys.argv[0]})')
for lang in LANGUAGES:
parts = [f' {lang!r}: Language(name={lang!r}']
for k in FIELDS:
parts.append(f', {k}={lang}.{k}')
parts.append('), # noqa: E501')
print(''.join(parts))
print(' # END GENERATED')
return 0
if __name__ == '__main__':
exit(main())
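For reference, each line the script prints follows the f-string template in main(); for the 'python' entry the output would look roughly like:

    'python': Language(name='python', ENVIRONMENT_DIR=python.ENVIRONMENT_DIR, get_default_version=python.get_default_version, healthy=python.healthy, install_environment=python.install_environment, run_hook=python.run_hook),  # noqa: E501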

View file

@ -1,14 +1,14 @@
#!/usr/bin/env bash #!/usr/bin/env bash
# This is a script used in travis-ci to install swift # This is a script used in CI to install swift
set -euxo pipefail set -euxo pipefail
. /etc/lsb-release . /etc/lsb-release
if [ "$DISTRIB_CODENAME" = "trusty" ]; then if [ "$DISTRIB_CODENAME" = "bionic" ]; then
SWIFT_URL='https://swift.org/builds/swift-4.0.3-release/ubuntu1404/swift-4.0.3-RELEASE/swift-4.0.3-RELEASE-ubuntu14.04.tar.gz' SWIFT_URL='https://swift.org/builds/swift-5.1.3-release/ubuntu1804/swift-5.1.3-RELEASE/swift-5.1.3-RELEASE-ubuntu18.04.tar.gz'
SWIFT_HASH="dddb40ec4956e4f6a3f4532d859691d5d1ba8822f6e8b4ec6c452172dbede5ae" SWIFT_HASH='ac82ccd773fe3d586fc340814e31e120da1ff695c6a712f6634e9cc720769610'
else else
SWIFT_URL='https://swift.org/builds/swift-4.0.3-release/ubuntu1604/swift-4.0.3-RELEASE/swift-4.0.3-RELEASE-ubuntu16.04.tar.gz' echo "unknown dist: ${DISTRIB_CODENAME}" 1>&2
SWIFT_HASH="9adf64cabc7c02ea2d08f150b449b05e46bd42d6e542bf742b3674f5c37f0dbf" exit 1
fi fi
check() { check() {

View file

@ -1,5 +1,3 @@
from __future__ import print_function
import sys import sys

View file

@ -1,5 +1,3 @@
from __future__ import print_function
import sys import sys

View file

@ -1,5 +1,3 @@
from __future__ import print_function
import sys import sys

View file

@ -5,7 +5,7 @@ import sys
def main(): def main():
for i in range(6): for i in range(6):
f = sys.stdout if i % 2 == 0 else sys.stderr f = sys.stdout if i % 2 == 0 else sys.stderr
f.write('{}\n'.format(i)) f.write(f'{i}\n')
f.flush() f.flush()

View file

@ -1,5 +1,7 @@
// swift-tools-version:5.0
import PackageDescription import PackageDescription
let package = Package( let package = Package(
name: "swift_hooks_repo" name: "swift_hooks_repo",
targets: [.target(name: "swift_hooks_repo")]
) )

View file

@ -1,5 +1,3 @@
from __future__ import unicode_literals
import contextlib import contextlib
import os.path import os.path
import subprocess import subprocess
@ -50,7 +48,7 @@ def broken_deep_listdir(): # pragma: no cover (platform specific)
if sys.platform != 'win32': if sys.platform != 'win32':
return False return False
try: try:
os.listdir(str('\\\\?\\') + os.path.abspath(str('.'))) os.listdir('\\\\?\\' + os.path.abspath('.'))
except OSError: except OSError:
return True return True
try: try:

View file

@ -1,5 +1,3 @@
from __future__ import unicode_literals
import logging import logging
import cfgv import cfgv

View file

@ -1,8 +1,6 @@
from __future__ import unicode_literals
import sys import sys
from unittest import mock
import mock
import pytest import pytest
from pre_commit import envcontext from pre_commit import envcontext
@ -14,7 +12,7 @@ from pre_commit.color import use_color
@pytest.mark.parametrize( @pytest.mark.parametrize(
('in_text', 'in_color', 'in_use_color', 'expected'), ( ('in_text', 'in_color', 'in_use_color', 'expected'), (
('foo', GREEN, True, '{}foo\033[0m'.format(GREEN)), ('foo', GREEN, True, f'{GREEN}foo\033[0m'),
('foo', GREEN, False, 'foo'), ('foo', GREEN, False, 'foo'),
), ),
) )
@ -39,21 +37,21 @@ def test_use_color_no_tty():
def test_use_color_tty_with_color_support(): def test_use_color_tty_with_color_support():
with mock.patch.object(sys.stdout, 'isatty', return_value=True): with mock.patch.object(sys.stdout, 'isatty', return_value=True):
with mock.patch('pre_commit.color.terminal_supports_color', True): with mock.patch('pre_commit.color.terminal_supports_color', True):
with envcontext.envcontext([('TERM', envcontext.UNSET)]): with envcontext.envcontext((('TERM', envcontext.UNSET),)):
assert use_color('auto') is True assert use_color('auto') is True
def test_use_color_tty_without_color_support(): def test_use_color_tty_without_color_support():
with mock.patch.object(sys.stdout, 'isatty', return_value=True): with mock.patch.object(sys.stdout, 'isatty', return_value=True):
with mock.patch('pre_commit.color.terminal_supports_color', False): with mock.patch('pre_commit.color.terminal_supports_color', False):
with envcontext.envcontext([('TERM', envcontext.UNSET)]): with envcontext.envcontext((('TERM', envcontext.UNSET),)):
assert use_color('auto') is False assert use_color('auto') is False
def test_use_color_dumb_term(): def test_use_color_dumb_term():
with mock.patch.object(sys.stdout, 'isatty', return_value=True): with mock.patch.object(sys.stdout, 'isatty', return_value=True):
with mock.patch('pre_commit.color.terminal_supports_color', True): with mock.patch('pre_commit.color.terminal_supports_color', True):
with envcontext.envcontext([('TERM', 'dumb')]): with envcontext.envcontext((('TERM', 'dumb'),)):
assert use_color('auto') is False assert use_color('auto') is False

View file

@ -1,5 +1,3 @@
from __future__ import unicode_literals
import pipes import pipes
import pytest import pytest
@ -213,7 +211,7 @@ def test_autoupdate_out_of_date_repo_with_correct_repo_name(
with open(C.CONFIG_FILE) as f: with open(C.CONFIG_FILE) as f:
before = f.read() before = f.read()
repo_name = 'file://{}'.format(out_of_date.path) repo_name = f'file://{out_of_date.path}'
ret = autoupdate( ret = autoupdate(
C.CONFIG_FILE, store, freeze=False, tags_only=False, C.CONFIG_FILE, store, freeze=False, tags_only=False,
repos=(repo_name,), repos=(repo_name,),
@ -312,7 +310,7 @@ def test_autoupdate_freeze(tagged, in_tmpdir, store):
assert autoupdate(C.CONFIG_FILE, store, freeze=True, tags_only=False) == 0 assert autoupdate(C.CONFIG_FILE, store, freeze=True, tags_only=False) == 0
with open(C.CONFIG_FILE) as f: with open(C.CONFIG_FILE) as f:
expected = 'rev: {} # frozen: v1.2.3'.format(tagged.head_rev) expected = f'rev: {tagged.head_rev} # frozen: v1.2.3'
assert expected in f.read() assert expected in f.read()
# if we un-freeze it should remove the frozen comment # if we un-freeze it should remove the frozen comment

View file

@ -1,8 +1,6 @@
from __future__ import unicode_literals
import os.path import os.path
from unittest import mock
import mock
import pytest import pytest
from pre_commit.commands.clean import clean from pre_commit.commands.clean import clean

View file

@ -1,6 +1,5 @@
import os.path import os.path
from unittest import mock
import mock
import pre_commit.constants as C import pre_commit.constants as C
from pre_commit.commands.init_templatedir import init_templatedir from pre_commit.commands.init_templatedir import init_templatedir
@ -25,7 +24,7 @@ def test_init_templatedir(tmpdir, tempdir_factory, store, cap_out):
'[WARNING] maybe `git config --global init.templateDir', '[WARNING] maybe `git config --global init.templateDir',
) )
with envcontext([('GIT_TEMPLATE_DIR', target)]): with envcontext((('GIT_TEMPLATE_DIR', target),)):
path = make_consuming_repo(tempdir_factory, 'script_hooks_repo') path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
with cwd(path): with cwd(path):
@ -53,7 +52,7 @@ def test_init_templatedir_already_set(tmpdir, tempdir_factory, store, cap_out):
def test_init_templatedir_not_set(tmpdir, store, cap_out): def test_init_templatedir_not_set(tmpdir, store, cap_out):
# set HOME to ignore the current `.gitconfig` # set HOME to ignore the current `.gitconfig`
with envcontext([('HOME', str(tmpdir))]): with envcontext((('HOME', str(tmpdir)),)):
with tmpdir.join('tmpl').ensure_dir().as_cwd(): with tmpdir.join('tmpl').ensure_dir().as_cwd():
# we have not set init.templateDir so this should produce a warning # we have not set init.templateDir so this should produce a warning
init_templatedir( init_templatedir(

View file

@ -1,13 +1,7 @@
# -*- coding: UTF-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
import io
import os.path import os.path
import re import re
import sys import sys
from unittest import mock
import mock
import pre_commit.constants as C import pre_commit.constants as C
from pre_commit.commands.install_uninstall import CURRENT_HASH from pre_commit.commands.install_uninstall import CURRENT_HASH
@ -123,7 +117,7 @@ def _get_commit_output(tempdir_factory, touch_file='foo', **kwargs):
fn=cmd_output_mocked_pre_commit_home, fn=cmd_output_mocked_pre_commit_home,
retcode=None, retcode=None,
tempdir_factory=tempdir_factory, tempdir_factory=tempdir_factory,
**kwargs **kwargs,
) )
@ -203,7 +197,7 @@ def test_commit_am(tempdir_factory, store):
open('unstaged', 'w').close() open('unstaged', 'w').close()
cmd_output('git', 'add', '.') cmd_output('git', 'add', '.')
git_commit(cwd=path) git_commit(cwd=path)
with io.open('unstaged', 'w') as foo_file: with open('unstaged', 'w') as foo_file:
foo_file.write('Oh hai') foo_file.write('Oh hai')
assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0 assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0
@ -314,7 +308,7 @@ EXISTING_COMMIT_RUN = re.compile(
def _write_legacy_hook(path): def _write_legacy_hook(path):
mkdirp(os.path.join(path, '.git/hooks')) mkdirp(os.path.join(path, '.git/hooks'))
with io.open(os.path.join(path, '.git/hooks/pre-commit'), 'w') as f: with open(os.path.join(path, '.git/hooks/pre-commit'), 'w') as f:
f.write('#!/usr/bin/env bash\necho "legacy hook"\n') f.write('#!/usr/bin/env bash\necho "legacy hook"\n')
make_executable(f.name) make_executable(f.name)
@ -377,7 +371,7 @@ def test_failing_existing_hook_returns_1(tempdir_factory, store):
with cwd(path): with cwd(path):
# Write out a failing "old" hook # Write out a failing "old" hook
mkdirp(os.path.join(path, '.git/hooks')) mkdirp(os.path.join(path, '.git/hooks'))
with io.open(os.path.join(path, '.git/hooks/pre-commit'), 'w') as f: with open(os.path.join(path, '.git/hooks/pre-commit'), 'w') as f:
f.write('#!/usr/bin/env bash\necho "fail!"\nexit 1\n') f.write('#!/usr/bin/env bash\necho "fail!"\nexit 1\n')
make_executable(f.name) make_executable(f.name)
@ -439,7 +433,7 @@ def test_replace_old_commit_script(tempdir_factory, store):
) )
mkdirp(os.path.join(path, '.git/hooks')) mkdirp(os.path.join(path, '.git/hooks'))
with io.open(os.path.join(path, '.git/hooks/pre-commit'), 'w') as f: with open(os.path.join(path, '.git/hooks/pre-commit'), 'w') as f:
f.write(new_contents) f.write(new_contents)
make_executable(f.name) make_executable(f.name)
@ -525,7 +519,7 @@ def _get_push_output(tempdir_factory, opts=()):
return cmd_output_mocked_pre_commit_home( return cmd_output_mocked_pre_commit_home(
'git', 'push', 'origin', 'HEAD:new_branch', *opts, 'git', 'push', 'origin', 'HEAD:new_branch', *opts,
tempdir_factory=tempdir_factory, tempdir_factory=tempdir_factory,
retcode=None retcode=None,
)[:2] )[:2]
@ -616,7 +610,7 @@ def test_pre_push_legacy(tempdir_factory, store):
cmd_output('git', 'clone', upstream, path) cmd_output('git', 'clone', upstream, path)
with cwd(path): with cwd(path):
mkdirp(os.path.join(path, '.git/hooks')) mkdirp(os.path.join(path, '.git/hooks'))
with io.open(os.path.join(path, '.git/hooks/pre-push'), 'w') as f: with open(os.path.join(path, '.git/hooks/pre-push'), 'w') as f:
f.write( f.write(
'#!/usr/bin/env bash\n' '#!/usr/bin/env bash\n'
'set -eu\n' 'set -eu\n'
@ -665,7 +659,7 @@ def test_commit_msg_integration_passing(
def test_commit_msg_legacy(commit_msg_repo, tempdir_factory, store): def test_commit_msg_legacy(commit_msg_repo, tempdir_factory, store):
hook_path = os.path.join(commit_msg_repo, '.git/hooks/commit-msg') hook_path = os.path.join(commit_msg_repo, '.git/hooks/commit-msg')
mkdirp(os.path.dirname(hook_path)) mkdirp(os.path.dirname(hook_path))
with io.open(hook_path, 'w') as hook_file: with open(hook_path, 'w') as hook_file:
hook_file.write( hook_file.write(
'#!/usr/bin/env bash\n' '#!/usr/bin/env bash\n'
'set -eu\n' 'set -eu\n'
@ -709,7 +703,7 @@ def test_prepare_commit_msg_integration_passing(
commit_msg_path = os.path.join( commit_msg_path = os.path.join(
prepare_commit_msg_repo, '.git/COMMIT_EDITMSG', prepare_commit_msg_repo, '.git/COMMIT_EDITMSG',
) )
with io.open(commit_msg_path) as f: with open(commit_msg_path) as f:
assert 'Signed off by: ' in f.read() assert 'Signed off by: ' in f.read()
@ -720,7 +714,7 @@ def test_prepare_commit_msg_legacy(
prepare_commit_msg_repo, '.git/hooks/prepare-commit-msg', prepare_commit_msg_repo, '.git/hooks/prepare-commit-msg',
) )
mkdirp(os.path.dirname(hook_path)) mkdirp(os.path.dirname(hook_path))
with io.open(hook_path, 'w') as hook_file: with open(hook_path, 'w') as hook_file:
hook_file.write( hook_file.write(
'#!/usr/bin/env bash\n' '#!/usr/bin/env bash\n'
'set -eu\n' 'set -eu\n'
@ -739,7 +733,7 @@ def test_prepare_commit_msg_legacy(
commit_msg_path = os.path.join( commit_msg_path = os.path.join(
prepare_commit_msg_repo, '.git/COMMIT_EDITMSG', prepare_commit_msg_repo, '.git/COMMIT_EDITMSG',
) )
with io.open(commit_msg_path) as f: with open(commit_msg_path) as f:
assert 'Signed off by: ' in f.read() assert 'Signed off by: ' in f.read()

View file

@ -1,6 +1,3 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import pytest import pytest
import pre_commit.constants as C import pre_commit.constants as C

View file

@ -1,13 +1,9 @@
# -*- coding: UTF-8 -*-
from __future__ import unicode_literals
import io
import os.path import os.path
import pipes import pipes
import sys import sys
import time import time
from unittest import mock
import mock
import pytest import pytest
import pre_commit.constants as C import pre_commit.constants as C
@ -154,7 +150,7 @@ def test_types_hook_repository(cap_out, store, tempdir_factory):
def test_exclude_types_hook_repository(cap_out, store, tempdir_factory): def test_exclude_types_hook_repository(cap_out, store, tempdir_factory):
git_path = make_consuming_repo(tempdir_factory, 'exclude_types_repo') git_path = make_consuming_repo(tempdir_factory, 'exclude_types_repo')
with cwd(git_path): with cwd(git_path):
with io.open('exe', 'w') as exe: with open('exe', 'w') as exe:
exe.write('#!/usr/bin/env python3\n') exe.write('#!/usr/bin/env python3\n')
make_executable('exe') make_executable('exe')
cmd_output('git', 'add', 'exe') cmd_output('git', 'add', 'exe')
@ -601,8 +597,8 @@ def test_stages(cap_out, store, repo_with_passing_hook):
'repo': 'local', 'repo': 'local',
'hooks': [ 'hooks': [
{ {
'id': 'do-not-commit-{}'.format(i), 'id': f'do-not-commit-{i}',
'name': 'hook {}'.format(i), 'name': f'hook {i}',
'entry': 'DO NOT COMMIT', 'entry': 'DO NOT COMMIT',
'language': 'pygrep', 'language': 'pygrep',
'stages': [stage], 'stages': [stage],
@ -636,7 +632,7 @@ def test_stages(cap_out, store, repo_with_passing_hook):
def test_commit_msg_hook(cap_out, store, commit_msg_repo): def test_commit_msg_hook(cap_out, store, commit_msg_repo):
filename = '.git/COMMIT_EDITMSG' filename = '.git/COMMIT_EDITMSG'
with io.open(filename, 'w') as f: with open(filename, 'w') as f:
f.write('This is the commit message') f.write('This is the commit message')
_test_run( _test_run(
@ -652,7 +648,7 @@ def test_commit_msg_hook(cap_out, store, commit_msg_repo):
def test_prepare_commit_msg_hook(cap_out, store, prepare_commit_msg_repo): def test_prepare_commit_msg_hook(cap_out, store, prepare_commit_msg_repo):
filename = '.git/COMMIT_EDITMSG' filename = '.git/COMMIT_EDITMSG'
with io.open(filename, 'w') as f: with open(filename, 'w') as f:
f.write('This is the commit message') f.write('This is the commit message')
_test_run( _test_run(
@ -665,7 +661,7 @@ def test_prepare_commit_msg_hook(cap_out, store, prepare_commit_msg_repo):
stage=False, stage=False,
) )
with io.open(filename) as f: with open(filename) as f:
assert 'Signed off by: ' in f.read() assert 'Signed off by: ' in f.read()
@ -692,7 +688,7 @@ def test_local_hook_passes(cap_out, store, repo_with_passing_hook):
} }
add_config_to_repo(repo_with_passing_hook, config) add_config_to_repo(repo_with_passing_hook, config)
with io.open('dummy.py', 'w') as staged_file: with open('dummy.py', 'w') as staged_file:
staged_file.write('"""TODO: something"""\n') staged_file.write('"""TODO: something"""\n')
cmd_output('git', 'add', 'dummy.py') cmd_output('git', 'add', 'dummy.py')
@ -719,7 +715,7 @@ def test_local_hook_fails(cap_out, store, repo_with_passing_hook):
} }
add_config_to_repo(repo_with_passing_hook, config) add_config_to_repo(repo_with_passing_hook, config)
with io.open('dummy.py', 'w') as staged_file: with open('dummy.py', 'w') as staged_file:
staged_file.write('"""TODO: something"""\n') staged_file.write('"""TODO: something"""\n')
cmd_output('git', 'add', 'dummy.py') cmd_output('git', 'add', 'dummy.py')

View file

@ -1,6 +1,3 @@
from __future__ import absolute_import
from __future__ import unicode_literals
from pre_commit.commands.sample_config import sample_config from pre_commit.commands.sample_config import sample_config

View file

@ -1,11 +1,7 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import os.path import os.path
import re import re
import time import time
from unittest import mock
import mock
from pre_commit import git from pre_commit import git
from pre_commit.commands.try_repo import try_repo from pre_commit.commands.try_repo import try_repo

View file

@ -1,14 +1,10 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import functools import functools
import io import io
import logging import logging
import os.path import os.path
from unittest import mock
import mock
import pytest import pytest
import six
from pre_commit import output from pre_commit import output
from pre_commit.envcontext import envcontext from pre_commit.envcontext import envcontext
@ -36,19 +32,19 @@ def no_warnings(recwarn):
' missing __init__' in message ' missing __init__' in message
): ):
warnings.append( warnings.append(
'{}:{} {}'.format(warning.filename, warning.lineno, message), f'{warning.filename}:{warning.lineno} {message}',
) )
assert not warnings assert not warnings
@pytest.fixture @pytest.fixture
def tempdir_factory(tmpdir): def tempdir_factory(tmpdir):
class TmpdirFactory(object): class TmpdirFactory:
def __init__(self): def __init__(self):
self.tmpdir_count = 0 self.tmpdir_count = 0
def get(self): def get(self):
path = tmpdir.join(six.text_type(self.tmpdir_count)).strpath path = tmpdir.join(str(self.tmpdir_count)).strpath
self.tmpdir_count += 1 self.tmpdir_count += 1
os.mkdir(path) os.mkdir(path)
return path return path
@ -73,18 +69,18 @@ def in_git_dir(tmpdir):
def _make_conflict(): def _make_conflict():
cmd_output('git', 'checkout', 'origin/master', '-b', 'foo') cmd_output('git', 'checkout', 'origin/master', '-b', 'foo')
with io.open('conflict_file', 'w') as conflict_file: with open('conflict_file', 'w') as conflict_file:
conflict_file.write('herp\nderp\n') conflict_file.write('herp\nderp\n')
cmd_output('git', 'add', 'conflict_file') cmd_output('git', 'add', 'conflict_file')
with io.open('foo_only_file', 'w') as foo_only_file: with open('foo_only_file', 'w') as foo_only_file:
foo_only_file.write('foo') foo_only_file.write('foo')
cmd_output('git', 'add', 'foo_only_file') cmd_output('git', 'add', 'foo_only_file')
git_commit(msg=_make_conflict.__name__) git_commit(msg=_make_conflict.__name__)
cmd_output('git', 'checkout', 'origin/master', '-b', 'bar') cmd_output('git', 'checkout', 'origin/master', '-b', 'bar')
with io.open('conflict_file', 'w') as conflict_file: with open('conflict_file', 'w') as conflict_file:
conflict_file.write('harp\nddrp\n') conflict_file.write('harp\nddrp\n')
cmd_output('git', 'add', 'conflict_file') cmd_output('git', 'add', 'conflict_file')
with io.open('bar_only_file', 'w') as bar_only_file: with open('bar_only_file', 'w') as bar_only_file:
bar_only_file.write('bar') bar_only_file.write('bar')
cmd_output('git', 'add', 'bar_only_file') cmd_output('git', 'add', 'bar_only_file')
git_commit(msg=_make_conflict.__name__) git_commit(msg=_make_conflict.__name__)
@ -145,14 +141,14 @@ def prepare_commit_msg_repo(tempdir_factory):
'hooks': [{ 'hooks': [{
'id': 'add-signoff', 'id': 'add-signoff',
'name': 'Add "Signed off by:"', 'name': 'Add "Signed off by:"',
'entry': './{}'.format(script_name), 'entry': f'./{script_name}',
'language': 'script', 'language': 'script',
'stages': ['prepare-commit-msg'], 'stages': ['prepare-commit-msg'],
}], }],
} }
write_config(path, config) write_config(path, config)
with cwd(path): with cwd(path):
with io.open(script_name, 'w') as script_file: with open(script_name, 'w') as script_file:
script_file.write( script_file.write(
'#!/usr/bin/env bash\n' '#!/usr/bin/env bash\n'
'set -eu\n' 'set -eu\n'
@ -229,7 +225,7 @@ def log_info_mock():
yield mck yield mck
class FakeStream(object): class FakeStream:
def __init__(self): def __init__(self):
self.data = io.BytesIO() self.data = io.BytesIO()
@ -240,7 +236,7 @@ class FakeStream(object):
pass pass
class Fixture(object): class Fixture:
def __init__(self, stream): def __init__(self, stream):
self._stream = stream self._stream = stream
@ -278,5 +274,5 @@ def fake_log_handler():
@pytest.fixture(scope='session', autouse=True) @pytest.fixture(scope='session', autouse=True)
def set_git_templatedir(tmpdir_factory): def set_git_templatedir(tmpdir_factory):
tdir = str(tmpdir_factory.mktemp('git_template_dir')) tdir = str(tmpdir_factory.mktemp('git_template_dir'))
with envcontext([('GIT_TEMPLATE_DIR', tdir)]): with envcontext((('GIT_TEMPLATE_DIR', tdir),)):
yield yield

View file

@ -1,9 +1,6 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import os import os
from unittest import mock
import mock
import pytest import pytest
from pre_commit.envcontext import envcontext from pre_commit.envcontext import envcontext
@ -94,16 +91,16 @@ def test_exception_safety():
class MyError(RuntimeError): class MyError(RuntimeError):
pass pass
env = {} env = {'hello': 'world'}
with pytest.raises(MyError): with pytest.raises(MyError):
with envcontext([('foo', 'bar')], _env=env): with envcontext((('foo', 'bar'),), _env=env):
raise MyError() raise MyError()
assert env == {} assert env == {'hello': 'world'}
def test_integration_os_environ(): def test_integration_os_environ():
with mock.patch.dict(os.environ, {'FOO': 'bar'}, clear=True): with mock.patch.dict(os.environ, {'FOO': 'bar'}, clear=True):
assert os.environ == {'FOO': 'bar'} assert os.environ == {'FOO': 'bar'}
with envcontext([('HERP', 'derp')]): with envcontext((('HERP', 'derp'),)):
assert os.environ == {'FOO': 'bar', 'HERP': 'derp'} assert os.environ == {'FOO': 'bar', 'HERP': 'derp'}
assert os.environ == {'FOO': 'bar'} assert os.environ == {'FOO': 'bar'}
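A minimal usage sketch of the tuple-based API these tests exercise (variable names are illustrative):

import os
from pre_commit.envcontext import UNSET, envcontext

os.environ['EXAMPLE'] = 'before'
with envcontext((('EXAMPLE', UNSET), ('OTHER', 'during'))):
    assert 'EXAMPLE' not in os.environ       # UNSET removes the variable
    assert os.environ['OTHER'] == 'during'   # plain strings are set
assert os.environ['EXAMPLE'] == 'before'     # originals restored on exit
assert 'OTHER' not in os.environ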

View file

@ -1,13 +1,8 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
import io
import os.path import os.path
import re import re
import sys import sys
from unittest import mock
import mock
import pytest import pytest
from pre_commit import error_handler from pre_commit import error_handler
@ -109,7 +104,7 @@ def test_log_and_exit(cap_out, mock_store_dir):
) )
assert os.path.exists(log_file) assert os.path.exists(log_file)
with io.open(log_file) as f: with open(log_file) as f:
logged = f.read() logged = f.read()
expected = ( expected = (
r'^### version information\n' r'^### version information\n'
@ -158,4 +153,4 @@ def test_error_handler_no_tty(tempdir_factory):
log_file = os.path.join(pre_commit_home, 'pre-commit.log') log_file = os.path.join(pre_commit_home, 'pre-commit.log')
out_lines = out.splitlines() out_lines = out.splitlines()
assert out_lines[-2] == 'An unexpected error has occurred: ValueError: ☃' assert out_lines[-2] == 'An unexpected error has occurred: ValueError: ☃'
assert out_lines[-1] == 'Check the log at {}'.format(log_file) assert out_lines[-1] == f'Check the log at {log_file}'

View file

@ -1,7 +1,3 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
import os.path import os.path
import pytest import pytest

View file

@ -1,58 +0,0 @@
from __future__ import unicode_literals
import functools
import inspect
import pytest
import six
from pre_commit.languages.all import all_languages
from pre_commit.languages.all import languages
if six.PY2: # pragma: no cover
ArgSpec = functools.partial(
inspect.ArgSpec, varargs=None, keywords=None, defaults=None,
)
getargspec = inspect.getargspec
else: # pragma: no cover
ArgSpec = functools.partial(
inspect.FullArgSpec, varargs=None, varkw=None, defaults=None,
kwonlyargs=[], kwonlydefaults=None, annotations={},
)
getargspec = inspect.getfullargspec
@pytest.mark.parametrize('language', all_languages)
def test_install_environment_argspec(language):
expected_argspec = ArgSpec(
args=['prefix', 'version', 'additional_dependencies'],
)
argspec = getargspec(languages[language].install_environment)
assert argspec == expected_argspec
@pytest.mark.parametrize('language', all_languages)
def test_ENVIRONMENT_DIR(language):
assert hasattr(languages[language], 'ENVIRONMENT_DIR')
@pytest.mark.parametrize('language', all_languages)
def test_run_hook_argpsec(language):
expected_argspec = ArgSpec(args=['hook', 'file_args', 'color'])
argspec = getargspec(languages[language].run_hook)
assert argspec == expected_argspec
@pytest.mark.parametrize('language', all_languages)
def test_get_default_version_argspec(language):
expected_argspec = ArgSpec(args=[])
argspec = getargspec(languages[language].get_default_version)
assert argspec == expected_argspec
@pytest.mark.parametrize('language', all_languages)
def test_healthy_argspec(language):
expected_argspec = ArgSpec(args=['prefix', 'language_version'])
argspec = getargspec(languages[language].healthy)
assert argspec == expected_argspec

View file

@ -1,7 +1,4 @@
from __future__ import absolute_import from unittest import mock
from __future__ import unicode_literals
import mock
from pre_commit.languages import docker from pre_commit.languages import docker
from pre_commit.util import CalledProcessError from pre_commit.util import CalledProcessError
@ -10,7 +7,7 @@ from pre_commit.util import CalledProcessError
def test_docker_is_running_process_error(): def test_docker_is_running_process_error():
with mock.patch( with mock.patch(
'pre_commit.languages.docker.cmd_output_b', 'pre_commit.languages.docker.cmd_output_b',
side_effect=CalledProcessError(None, None, None, None, None), side_effect=CalledProcessError(1, (), 0, b'', None),
): ):
assert docker.docker_is_running() is False assert docker.docker_is_running() is False

View file

@ -1,6 +1,3 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import pytest import pytest
from pre_commit.languages.golang import guess_go_dir from pre_commit.languages.golang import guess_go_dir

View file

@ -1,11 +1,8 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import multiprocessing import multiprocessing
import os import os
import sys import sys
from unittest import mock
import mock
import pytest import pytest
import pre_commit.constants as C import pre_commit.constants as C
@ -20,7 +17,7 @@ def test_basic_get_default_version():
def test_basic_healthy(): def test_basic_healthy():
assert helpers.basic_healthy(None, None) is True assert helpers.basic_healthy(Prefix('.'), 'default') is True
def test_failed_setup_command_does_not_unicode_error(): def test_failed_setup_command_does_not_unicode_error():
@ -80,4 +77,6 @@ def test_target_concurrency_cpu_count_not_implemented():
def test_shuffled_is_deterministic(): def test_shuffled_is_deterministic():
assert helpers._shuffled(range(10)) == [3, 7, 8, 2, 4, 6, 5, 1, 0, 9] seq = [str(i) for i in range(10)]
expected = ['3', '7', '8', '2', '4', '6', '5', '1', '0', '9']
assert helpers._shuffled(seq) == expected

View file

@ -1,6 +1,3 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import pytest import pytest
from pre_commit.languages import pygrep from pre_commit.languages import pygrep

View file

@ -1,10 +1,7 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import os.path import os.path
import sys import sys
from unittest import mock
import mock
import pytest import pytest
import pre_commit.constants as C import pre_commit.constants as C
@ -16,7 +13,7 @@ def test_norm_version_expanduser():
home = os.path.expanduser('~') home = os.path.expanduser('~')
if os.name == 'nt': # pragma: no cover (nt) if os.name == 'nt': # pragma: no cover (nt)
path = r'~\python343' path = r'~\python343'
expected_path = r'{}\python343'.format(home) expected_path = fr'{home}\python343'
else: # pragma: windows no cover else: # pragma: windows no cover
path = '~/.pyenv/versions/3.4.3/bin/python' path = '~/.pyenv/versions/3.4.3/bin/python'
expected_path = home + '/.pyenv/versions/3.4.3/bin/python' expected_path = home + '/.pyenv/versions/3.4.3/bin/python'

View file

@ -1,5 +1,3 @@
from __future__ import unicode_literals
import os.path import os.path
import pipes import pipes

View file

@ -1,27 +1,21 @@
from __future__ import unicode_literals import logging
from pre_commit import color from pre_commit import color
from pre_commit.logging_handler import LoggingHandler from pre_commit.logging_handler import LoggingHandler
class FakeLogRecord(object): def _log_record(message, level):
def __init__(self, message, levelname, levelno): return logging.LogRecord('name', level, '', 1, message, {}, None)
self.message = message
self.levelname = levelname
self.levelno = levelno
def getMessage(self):
return self.message
def test_logging_handler_color(cap_out): def test_logging_handler_color(cap_out):
handler = LoggingHandler(True) handler = LoggingHandler(True)
handler.emit(FakeLogRecord('hi', 'WARNING', 30)) handler.emit(_log_record('hi', logging.WARNING))
ret = cap_out.get() ret = cap_out.get()
assert ret == color.YELLOW + '[WARNING]' + color.NORMAL + ' hi\n' assert ret == color.YELLOW + '[WARNING]' + color.NORMAL + ' hi\n'
def test_logging_handler_no_color(cap_out): def test_logging_handler_no_color(cap_out):
handler = LoggingHandler(False) handler = LoggingHandler(False)
handler.emit(FakeLogRecord('hi', 'WARNING', 30)) handler.emit(_log_record('hi', logging.WARNING))
assert cap_out.get() == '[WARNING] hi\n' assert cap_out.get() == '[WARNING] hi\n'
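For context, logging.LogRecord takes (name, level, pathname, lineno, msg, args, exc_info) positionally, so the helper above builds a real record rather than a fake; a quick standalone check:

import logging

record = logging.LogRecord('name', logging.WARNING, '', 1, 'hi', {}, None)
assert record.getMessage() == 'hi'
assert record.levelname == 'WARNING'
assert record.levelno == logging.WARNING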

View file

@ -1,10 +1,7 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import argparse import argparse
import os.path import os.path
from unittest import mock
import mock
import pytest import pytest
import pre_commit.constants as C import pre_commit.constants as C
@ -27,25 +24,24 @@ def test_append_replace_default(argv, expected):
assert parser.parse_args(argv).f == expected assert parser.parse_args(argv).f == expected
class Args(object): def _args(**kwargs):
def __init__(self, **kwargs): kwargs.setdefault('command', 'help')
kwargs.setdefault('command', 'help') kwargs.setdefault('config', C.CONFIG_FILE)
kwargs.setdefault('config', C.CONFIG_FILE) return argparse.Namespace(**kwargs)
self.__dict__.update(kwargs)
def test_adjust_args_and_chdir_not_in_git_dir(in_tmpdir): def test_adjust_args_and_chdir_not_in_git_dir(in_tmpdir):
with pytest.raises(FatalError): with pytest.raises(FatalError):
main._adjust_args_and_chdir(Args()) main._adjust_args_and_chdir(_args())
def test_adjust_args_and_chdir_in_dot_git_dir(in_git_dir): def test_adjust_args_and_chdir_in_dot_git_dir(in_git_dir):
with in_git_dir.join('.git').as_cwd(), pytest.raises(FatalError): with in_git_dir.join('.git').as_cwd(), pytest.raises(FatalError):
main._adjust_args_and_chdir(Args()) main._adjust_args_and_chdir(_args())
def test_adjust_args_and_chdir_noop(in_git_dir): def test_adjust_args_and_chdir_noop(in_git_dir):
args = Args(command='run', files=['f1', 'f2']) args = _args(command='run', files=['f1', 'f2'])
main._adjust_args_and_chdir(args) main._adjust_args_and_chdir(args)
assert os.getcwd() == in_git_dir assert os.getcwd() == in_git_dir
assert args.config == C.CONFIG_FILE assert args.config == C.CONFIG_FILE
@ -56,7 +52,7 @@ def test_adjust_args_and_chdir_relative_things(in_git_dir):
in_git_dir.join('foo/cfg.yaml').ensure() in_git_dir.join('foo/cfg.yaml').ensure()
in_git_dir.join('foo').chdir() in_git_dir.join('foo').chdir()
args = Args(command='run', files=['f1', 'f2'], config='cfg.yaml') args = _args(command='run', files=['f1', 'f2'], config='cfg.yaml')
main._adjust_args_and_chdir(args) main._adjust_args_and_chdir(args)
assert os.getcwd() == in_git_dir assert os.getcwd() == in_git_dir
assert args.config == os.path.join('foo', 'cfg.yaml') assert args.config == os.path.join('foo', 'cfg.yaml')
@ -66,7 +62,7 @@ def test_adjust_args_and_chdir_relative_things(in_git_dir):
def test_adjust_args_and_chdir_non_relative_config(in_git_dir): def test_adjust_args_and_chdir_non_relative_config(in_git_dir):
in_git_dir.join('foo').ensure_dir().chdir() in_git_dir.join('foo').ensure_dir().chdir()
args = Args() args = _args()
main._adjust_args_and_chdir(args) main._adjust_args_and_chdir(args)
assert os.getcwd() == in_git_dir assert os.getcwd() == in_git_dir
assert args.config == C.CONFIG_FILE assert args.config == C.CONFIG_FILE
@ -75,7 +71,8 @@ def test_adjust_args_and_chdir_non_relative_config(in_git_dir):
def test_adjust_args_try_repo_repo_relative(in_git_dir): def test_adjust_args_try_repo_repo_relative(in_git_dir):
in_git_dir.join('foo').ensure_dir().chdir() in_git_dir.join('foo').ensure_dir().chdir()
args = Args(command='try-repo', repo='../foo', files=[]) args = _args(command='try-repo', repo='../foo', files=[])
assert args.repo is not None
assert os.path.exists(args.repo) assert os.path.exists(args.repo)
main._adjust_args_and_chdir(args) main._adjust_args_and_chdir(args)
assert os.getcwd() == in_git_dir assert os.getcwd() == in_git_dir
@ -189,4 +186,4 @@ def test_expected_fatal_error_no_git_repo(in_tmpdir, cap_out, mock_store_dir):
'An error has occurred: FatalError: git failed. ' 'An error has occurred: FatalError: git failed. '
'Is it installed, and are you in a Git repository directory?' 'Is it installed, and are you in a Git repository directory?'
) )
assert cap_out_lines[-1] == 'Check the log at {}'.format(log_file) assert cap_out_lines[-1] == f'Check the log at {log_file}'

View file

@ -1,6 +1,3 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import tarfile import tarfile
from pre_commit import git from pre_commit import git
@ -46,4 +43,4 @@ def test_main(tmpdir):
make_archives.main(('--dest', tmpdir.strpath)) make_archives.main(('--dest', tmpdir.strpath))
for archive, _, _ in make_archives.REPOS: for archive, _, _ in make_archives.REPOS:
assert tmpdir.join('{}.tar.gz'.format(archive)).exists() assert tmpdir.join(f'{archive}.tar.gz').exists()

View file

@ -1,6 +1,5 @@
from __future__ import unicode_literals from unittest import mock
import mock
import pytest import pytest
from pre_commit import color from pre_commit import color
@ -23,7 +22,7 @@ from pre_commit import output
), ),
) )
def test_get_hook_message_raises(kwargs): def test_get_hook_message_raises(kwargs):
with pytest.raises(ValueError): with pytest.raises(AssertionError):
output.get_hook_message('start', **kwargs) output.get_hook_message('start', **kwargs)

View file

@ -1,9 +1,5 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import contextlib import contextlib
import distutils.spawn import distutils.spawn
import io
import os import os
import sys import sys
@ -15,6 +11,12 @@ from pre_commit.envcontext import Var
from pre_commit.util import make_executable from pre_commit.util import make_executable
def _echo_exe() -> str:
exe = distutils.spawn.find_executable('echo')
assert exe is not None
return exe
def test_file_doesnt_exist(): def test_file_doesnt_exist():
assert parse_shebang.parse_filename('herp derp derp') == () assert parse_shebang.parse_filename('herp derp derp') == ()
@ -31,8 +33,7 @@ def test_find_executable_full_path():
def test_find_executable_on_path(): def test_find_executable_on_path():
expected = distutils.spawn.find_executable('echo') assert parse_shebang.find_executable('echo') == _echo_exe()
assert parse_shebang.find_executable('echo') == expected
def test_find_executable_not_found_none(): def test_find_executable_not_found_none():
@ -42,8 +43,8 @@ def test_find_executable_not_found_none():
def write_executable(shebang, filename='run'): def write_executable(shebang, filename='run'):
os.mkdir('bin') os.mkdir('bin')
path = os.path.join('bin', filename) path = os.path.join('bin', filename)
with io.open(path, 'w') as f: with open(path, 'w') as f:
f.write('#!{}'.format(shebang)) f.write(f'#!{shebang}')
make_executable(path) make_executable(path)
return path return path
@ -106,7 +107,7 @@ def test_normexe_is_a_directory(tmpdir):
with pytest.raises(OSError) as excinfo: with pytest.raises(OSError) as excinfo:
parse_shebang.normexe(exe) parse_shebang.normexe(exe)
msg, = excinfo.value.args msg, = excinfo.value.args
assert msg == 'Executable `{}` is a directory'.format(exe) assert msg == f'Executable `{exe}` is a directory'
def test_normexe_already_full_path(): def test_normexe_already_full_path():
@ -114,30 +115,29 @@ def test_normexe_already_full_path():
def test_normexe_gives_full_path(): def test_normexe_gives_full_path():
expected = distutils.spawn.find_executable('echo') assert parse_shebang.normexe('echo') == _echo_exe()
assert parse_shebang.normexe('echo') == expected assert os.sep in _echo_exe()
assert os.sep in expected
def test_normalize_cmd_trivial(): def test_normalize_cmd_trivial():
cmd = (distutils.spawn.find_executable('echo'), 'hi') cmd = (_echo_exe(), 'hi')
assert parse_shebang.normalize_cmd(cmd) == cmd assert parse_shebang.normalize_cmd(cmd) == cmd
def test_normalize_cmd_PATH(): def test_normalize_cmd_PATH():
cmd = ('echo', '--version') cmd = ('echo', '--version')
expected = (distutils.spawn.find_executable('echo'), '--version') expected = (_echo_exe(), '--version')
assert parse_shebang.normalize_cmd(cmd) == expected assert parse_shebang.normalize_cmd(cmd) == expected
def test_normalize_cmd_shebang(in_tmpdir): def test_normalize_cmd_shebang(in_tmpdir):
echo = distutils.spawn.find_executable('echo').replace(os.sep, '/') echo = _echo_exe().replace(os.sep, '/')
path = write_executable(echo) path = write_executable(echo)
assert parse_shebang.normalize_cmd((path,)) == (echo, path) assert parse_shebang.normalize_cmd((path,)) == (echo, path)
def test_normalize_cmd_PATH_shebang_full_path(in_tmpdir): def test_normalize_cmd_PATH_shebang_full_path(in_tmpdir):
echo = distutils.spawn.find_executable('echo').replace(os.sep, '/') echo = _echo_exe().replace(os.sep, '/')
path = write_executable(echo) path = write_executable(echo)
with bin_on_path(): with bin_on_path():
ret = parse_shebang.normalize_cmd(('run',)) ret = parse_shebang.normalize_cmd(('run',))
@ -145,7 +145,7 @@ def test_normalize_cmd_PATH_shebang_full_path(in_tmpdir):
def test_normalize_cmd_PATH_shebang_PATH(in_tmpdir): def test_normalize_cmd_PATH_shebang_PATH(in_tmpdir):
echo = distutils.spawn.find_executable('echo') echo = _echo_exe()
path = write_executable('/usr/bin/env echo') path = write_executable('/usr/bin/env echo')
with bin_on_path(): with bin_on_path():
ret = parse_shebang.normalize_cmd(('run',)) ret = parse_shebang.normalize_cmd(('run',))

View file

@ -1,5 +1,3 @@
from __future__ import unicode_literals
import os.path import os.path
import pytest import pytest

View file

@ -1,13 +1,12 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import os.path import os.path
import re import re
import shutil import shutil
import sys import sys
from typing import Any
from typing import Dict
from unittest import mock
import cfgv import cfgv
import mock
import pytest import pytest
import pre_commit.constants as C import pre_commit.constants as C
@ -312,7 +311,7 @@ def test_golang_hook(tempdir_factory, store):
def test_golang_hook_still_works_when_gobin_is_set(tempdir_factory, store): def test_golang_hook_still_works_when_gobin_is_set(tempdir_factory, store):
gobin_dir = tempdir_factory.get() gobin_dir = tempdir_factory.get()
with envcontext([('GOBIN', gobin_dir)]): with envcontext((('GOBIN', gobin_dir),)):
test_golang_hook(tempdir_factory, store) test_golang_hook(tempdir_factory, store)
assert os.listdir(gobin_dir) == [] assert os.listdir(gobin_dir) == []
@ -473,7 +472,7 @@ def _norm_pwd(path):
# Under windows bash's temp and windows temp is different. # Under windows bash's temp and windows temp is different.
# This normalizes to the bash /tmp # This normalizes to the bash /tmp
return cmd_output_b( return cmd_output_b(
'bash', '-c', "cd '{}' && pwd".format(path), 'bash', '-c', f"cd '{path}' && pwd",
)[1].strip() )[1].strip()
@ -766,7 +765,7 @@ def test_local_python_repo(store, local_python_config):
def test_default_language_version(store, local_python_config): def test_default_language_version(store, local_python_config):
config = { config: Dict[str, Any] = {
'default_language_version': {'python': 'fake'}, 'default_language_version': {'python': 'fake'},
'default_stages': ['commit'], 'default_stages': ['commit'],
'repos': [local_python_config], 'repos': [local_python_config],
@ -783,7 +782,7 @@ def test_default_language_version(store, local_python_config):
def test_default_stages(store, local_python_config): def test_default_stages(store, local_python_config):
config = { config: Dict[str, Any] = {
'default_language_version': {'python': C.DEFAULT}, 'default_language_version': {'python': C.DEFAULT},
'default_stages': ['commit'], 'default_stages': ['commit'],
'repos': [local_python_config], 'repos': [local_python_config],
@ -844,7 +843,7 @@ def test_manifest_hooks(tempdir_factory, store):
hook = _get_hook(config, store, 'bash_hook') hook = _get_hook(config, store, 'bash_hook')
assert hook == Hook( assert hook == Hook(
src='file://{}'.format(path), src=f'file://{path}',
prefix=Prefix(mock.ANY), prefix=Prefix(mock.ANY),
additional_dependencies=[], additional_dependencies=[],
alias='', alias='',

View file

@ -1,8 +1,3 @@
# -*- coding: UTF-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
import io
import itertools import itertools
import os.path import os.path
import shutil import shutil
@ -29,7 +24,8 @@ def patch_dir(tempdir_factory):
def get_short_git_status(): def get_short_git_status():
git_status = cmd_output('git', 'status', '-s')[1] git_status = cmd_output('git', 'status', '-s')[1]
return dict(reversed(line.split()) for line in git_status.splitlines()) line_parts = [line.split() for line in git_status.splitlines()]
return {v: k for k, v in line_parts}
@pytest.fixture @pytest.fixture
@ -47,7 +43,7 @@ def _test_foo_state(
encoding='UTF-8', encoding='UTF-8',
): ):
assert os.path.exists(path.foo_filename) assert os.path.exists(path.foo_filename)
with io.open(path.foo_filename, encoding=encoding) as f: with open(path.foo_filename, encoding=encoding) as f:
assert f.read() == foo_contents assert f.read() == foo_contents
actual_status = get_short_git_status()['foo'] actual_status = get_short_git_status()['foo']
assert status == actual_status assert status == actual_status
@ -64,7 +60,7 @@ def test_foo_nothing_unstaged(foo_staged, patch_dir):
def test_foo_something_unstaged(foo_staged, patch_dir): def test_foo_something_unstaged(foo_staged, patch_dir):
with io.open(foo_staged.foo_filename, 'w') as foo_file: with open(foo_staged.foo_filename, 'w') as foo_file:
foo_file.write('herp\nderp\n') foo_file.write('herp\nderp\n')
_test_foo_state(foo_staged, 'herp\nderp\n', 'AM') _test_foo_state(foo_staged, 'herp\nderp\n', 'AM')
@ -76,7 +72,7 @@ def test_foo_something_unstaged(foo_staged, patch_dir):
def test_does_not_crash_patch_dir_does_not_exist(foo_staged, patch_dir): def test_does_not_crash_patch_dir_does_not_exist(foo_staged, patch_dir):
with io.open(foo_staged.foo_filename, 'w') as foo_file: with open(foo_staged.foo_filename, 'w') as foo_file:
foo_file.write('hello\nworld\n') foo_file.write('hello\nworld\n')
shutil.rmtree(patch_dir) shutil.rmtree(patch_dir)
@ -97,7 +93,7 @@ def test_foo_something_unstaged_diff_color_always(foo_staged, patch_dir):
def test_foo_both_modify_non_conflicting(foo_staged, patch_dir): def test_foo_both_modify_non_conflicting(foo_staged, patch_dir):
with io.open(foo_staged.foo_filename, 'w') as foo_file: with open(foo_staged.foo_filename, 'w') as foo_file:
foo_file.write(FOO_CONTENTS + '9\n') foo_file.write(FOO_CONTENTS + '9\n')
_test_foo_state(foo_staged, FOO_CONTENTS + '9\n', 'AM') _test_foo_state(foo_staged, FOO_CONTENTS + '9\n', 'AM')
@ -106,7 +102,7 @@ def test_foo_both_modify_non_conflicting(foo_staged, patch_dir):
_test_foo_state(foo_staged) _test_foo_state(foo_staged)
# Modify the file as part of the "pre-commit" # Modify the file as part of the "pre-commit"
with io.open(foo_staged.foo_filename, 'w') as foo_file: with open(foo_staged.foo_filename, 'w') as foo_file:
foo_file.write(FOO_CONTENTS.replace('1', 'a')) foo_file.write(FOO_CONTENTS.replace('1', 'a'))
_test_foo_state(foo_staged, FOO_CONTENTS.replace('1', 'a'), 'AM') _test_foo_state(foo_staged, FOO_CONTENTS.replace('1', 'a'), 'AM')
@ -115,7 +111,7 @@ def test_foo_both_modify_non_conflicting(foo_staged, patch_dir):
def test_foo_both_modify_conflicting(foo_staged, patch_dir): def test_foo_both_modify_conflicting(foo_staged, patch_dir):
with io.open(foo_staged.foo_filename, 'w') as foo_file: with open(foo_staged.foo_filename, 'w') as foo_file:
foo_file.write(FOO_CONTENTS.replace('1', 'a')) foo_file.write(FOO_CONTENTS.replace('1', 'a'))
_test_foo_state(foo_staged, FOO_CONTENTS.replace('1', 'a'), 'AM') _test_foo_state(foo_staged, FOO_CONTENTS.replace('1', 'a'), 'AM')
@ -124,7 +120,7 @@ def test_foo_both_modify_conflicting(foo_staged, patch_dir):
_test_foo_state(foo_staged) _test_foo_state(foo_staged)
# Modify in the same place as the stashed diff # Modify in the same place as the stashed diff
with io.open(foo_staged.foo_filename, 'w') as foo_file: with open(foo_staged.foo_filename, 'w') as foo_file:
foo_file.write(FOO_CONTENTS.replace('1', 'b')) foo_file.write(FOO_CONTENTS.replace('1', 'b'))
_test_foo_state(foo_staged, FOO_CONTENTS.replace('1', 'b'), 'AM') _test_foo_state(foo_staged, FOO_CONTENTS.replace('1', 'b'), 'AM')
@ -142,8 +138,8 @@ def img_staged(in_git_dir):
def _test_img_state(path, expected_file='img1.jpg', status='A'): def _test_img_state(path, expected_file='img1.jpg', status='A'):
assert os.path.exists(path.img_filename) assert os.path.exists(path.img_filename)
with io.open(path.img_filename, 'rb') as f1: with open(path.img_filename, 'rb') as f1:
with io.open(get_resource_path(expected_file), 'rb') as f2: with open(get_resource_path(expected_file), 'rb') as f2:
assert f1.read() == f2.read() assert f1.read() == f2.read()
actual_status = get_short_git_status()['img.jpg'] actual_status = get_short_git_status()['img.jpg']
assert status == actual_status assert status == actual_status
@ -248,7 +244,7 @@ def test_sub_something_unstaged(sub_staged, patch_dir):
def test_stage_utf8_changes(foo_staged, patch_dir): def test_stage_utf8_changes(foo_staged, patch_dir):
contents = '\u2603' contents = '\u2603'
with io.open('foo', 'w', encoding='UTF-8') as foo_file: with open('foo', 'w', encoding='UTF-8') as foo_file:
foo_file.write(contents) foo_file.write(contents)
_test_foo_state(foo_staged, contents, 'AM') _test_foo_state(foo_staged, contents, 'AM')
@ -260,7 +256,7 @@ def test_stage_utf8_changes(foo_staged, patch_dir):
def test_stage_non_utf8_changes(foo_staged, patch_dir): def test_stage_non_utf8_changes(foo_staged, patch_dir):
contents = 'ú' contents = 'ú'
# Produce a latin-1 diff # Produce a latin-1 diff
with io.open('foo', 'w', encoding='latin-1') as foo_file: with open('foo', 'w', encoding='latin-1') as foo_file:
foo_file.write(contents) foo_file.write(contents)
_test_foo_state(foo_staged, contents, 'AM', encoding='latin-1') _test_foo_state(foo_staged, contents, 'AM', encoding='latin-1')
@ -282,14 +278,14 @@ def test_non_utf8_conflicting_diff(foo_staged, patch_dir):
# Previously, the error message (though discarded immediately) was being # Previously, the error message (though discarded immediately) was being
# decoded with the UTF-8 codec (causing a crash) # decoded with the UTF-8 codec (causing a crash)
contents = 'ú \n' contents = 'ú \n'
with io.open('foo', 'w', encoding='latin-1') as foo_file: with open('foo', 'w', encoding='latin-1') as foo_file:
foo_file.write(contents) foo_file.write(contents)
_test_foo_state(foo_staged, contents, 'AM', encoding='latin-1') _test_foo_state(foo_staged, contents, 'AM', encoding='latin-1')
with staged_files_only(patch_dir): with staged_files_only(patch_dir):
_test_foo_state(foo_staged) _test_foo_state(foo_staged)
# Create a conflicting diff that will need to be rolled back # Create a conflicting diff that will need to be rolled back
with io.open('foo', 'w') as foo_file: with open('foo', 'w') as foo_file:
foo_file.write('') foo_file.write('')
_test_foo_state(foo_staged, contents, 'AM', encoding='latin-1') _test_foo_state(foo_staged, contents, 'AM', encoding='latin-1')

View file

@ -1,13 +1,8 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import io
import os.path import os.path
import sqlite3 import sqlite3
from unittest import mock
import mock
import pytest import pytest
import six
from pre_commit import git from pre_commit import git
from pre_commit.store import _get_default_directory from pre_commit.store import _get_default_directory
@ -53,7 +48,7 @@ def test_store_init(store):
# Should create the store directory # Should create the store directory
assert os.path.exists(store.directory) assert os.path.exists(store.directory)
# Should create a README file indicating what the directory is about # Should create a README file indicating what the directory is about
with io.open(os.path.join(store.directory, 'README')) as readme_file: with open(os.path.join(store.directory, 'README')) as readme_file:
readme_contents = readme_file.read() readme_contents = readme_file.read()
for text_line in ( for text_line in (
'This directory is maintained by the pre-commit project.', 'This directory is maintained by the pre-commit project.',
@ -93,7 +88,7 @@ def test_clone_cleans_up_on_checkout_failure(store):
# This raises an exception because you can't clone something that # This raises an exception because you can't clone something that
# doesn't exist! # doesn't exist!
store.clone('/i_dont_exist_lol', 'fake_rev') store.clone('/i_dont_exist_lol', 'fake_rev')
assert '/i_dont_exist_lol' in six.text_type(excinfo.value) assert '/i_dont_exist_lol' in str(excinfo.value)
repo_dirs = [ repo_dirs = [
d for d in os.listdir(store.directory) if d.startswith('repo') d for d in os.listdir(store.directory) if d.startswith('repo')
@ -125,7 +120,7 @@ def test_clone_shallow_failure_fallback_to_complete(
# Force shallow clone failure # Force shallow clone failure
def fake_shallow_clone(self, *args, **kwargs): def fake_shallow_clone(self, *args, **kwargs):
raise CalledProcessError(None, None, None, None, None) raise CalledProcessError(1, (), 0, b'', None)
store._shallow_clone = fake_shallow_clone store._shallow_clone = fake_shallow_clone
ret = store.clone(path, rev) ret = store.clone(path, rev)

View file

@ -1,5 +1,3 @@
from __future__ import unicode_literals
import os.path import os.path
import stat import stat
import subprocess import subprocess
@ -17,9 +15,9 @@ from pre_commit.util import tmpdir
def test_CalledProcessError_str(): def test_CalledProcessError_str():
error = CalledProcessError(1, [str('exe')], 0, b'output', b'errors') error = CalledProcessError(1, ('exe',), 0, b'output', b'errors')
assert str(error) == ( assert str(error) == (
"command: ['exe']\n" "command: ('exe',)\n"
'return code: 1\n' 'return code: 1\n'
'expected return code: 0\n' 'expected return code: 0\n'
'stdout:\n' 'stdout:\n'
@ -30,9 +28,9 @@ def test_CalledProcessError_str():
def test_CalledProcessError_str_nooutput(): def test_CalledProcessError_str_nooutput():
error = CalledProcessError(1, [str('exe')], 0, b'', b'') error = CalledProcessError(1, ('exe',), 0, b'', b'')
assert str(error) == ( assert str(error) == (
"command: ['exe']\n" "command: ('exe',)\n"
'return code: 1\n' 'return code: 1\n'
'expected return code: 0\n' 'expected return code: 0\n'
'stdout: (none)\n' 'stdout: (none)\n'
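Judging from these assertions and the docker test earlier in the diff, the constructor's positional order in this changeset appears to be (returncode, cmd, expected_returncode, stdout, stderr); a hedged sketch:

from pre_commit.util import CalledProcessError

# order inferred from the tests in this diff, not from documentation
error = CalledProcessError(1, ('exe',), 0, b'output', b'errors')
assert "command: ('exe',)" in str(error)
assert 'return code: 1' in str(error)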

View file

@ -1,15 +1,11 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
import concurrent.futures import concurrent.futures
import os import os
import sys import sys
import time import time
from typing import Tuple
from unittest import mock
import mock
import pytest import pytest
import six
from pre_commit import parse_shebang from pre_commit import parse_shebang
from pre_commit import xargs from pre_commit import xargs
@ -30,19 +26,10 @@ def test_environ_size(env, expected):
@pytest.fixture @pytest.fixture
def win32_py2_mock(): def win32_mock():
with mock.patch.object(sys, 'getfilesystemencoding', return_value='utf-8'): with mock.patch.object(sys, 'getfilesystemencoding', return_value='utf-8'):
with mock.patch.object(sys, 'platform', 'win32'): with mock.patch.object(sys, 'platform', 'win32'):
with mock.patch.object(six, 'PY2', True): yield
yield
@pytest.fixture
def win32_py3_mock():
with mock.patch.object(sys, 'getfilesystemencoding', return_value='utf-8'):
with mock.patch.object(sys, 'platform', 'win32'):
with mock.patch.object(six, 'PY2', False):
yield
@pytest.fixture @pytest.fixture
@ -82,7 +69,7 @@ def test_partition_limits():
) )
def test_partition_limit_win32_py3(win32_py3_mock): def test_partition_limit_win32(win32_mock):
cmd = ('ninechars',) cmd = ('ninechars',)
# counted as half because of utf-16 encode # counted as half because of utf-16 encode
varargs = ('😑' * 5,) varargs = ('😑' * 5,)
@ -90,13 +77,6 @@ def test_partition_limit_win32_py3(win32_py3_mock):
assert ret == (cmd + varargs,) assert ret == (cmd + varargs,)
def test_partition_limit_win32_py2(win32_py2_mock):
cmd = ('ninechars',)
varargs = ('😑' * 5,) # 4 bytes * 5
ret = xargs.partition(cmd, varargs, 1, _max_length=31)
assert ret == (cmd + varargs,)
def test_partition_limit_linux(linux_mock): def test_partition_limit_linux(linux_mock):
cmd = ('ninechars',) cmd = ('ninechars',)
varargs = ('😑' * 5,) varargs = ('😑' * 5,)
@ -187,9 +167,8 @@ def test_xargs_concurrency():
def test_thread_mapper_concurrency_uses_threadpoolexecutor_map(): def test_thread_mapper_concurrency_uses_threadpoolexecutor_map():
with xargs._thread_mapper(10) as thread_map: with xargs._thread_mapper(10) as thread_map:
assert isinstance( _self = thread_map.__self__ # type: ignore
thread_map.__self__, concurrent.futures.ThreadPoolExecutor, assert isinstance(_self, concurrent.futures.ThreadPoolExecutor)
) is True
def test_thread_mapper_concurrency_uses_regular_map(): def test_thread_mapper_concurrency_uses_regular_map():
@ -199,7 +178,7 @@ def test_thread_mapper_concurrency_uses_regular_map():
def test_xargs_propagate_kwargs_to_cmd(): def test_xargs_propagate_kwargs_to_cmd():
env = {'PRE_COMMIT_TEST_VAR': 'Pre commit is awesome'} env = {'PRE_COMMIT_TEST_VAR': 'Pre commit is awesome'}
cmd = ('bash', '-c', 'echo $PRE_COMMIT_TEST_VAR', '--') cmd: Tuple[str, ...] = ('bash', '-c', 'echo $PRE_COMMIT_TEST_VAR', '--')
cmd = parse_shebang.normalize_cmd(cmd) cmd = parse_shebang.normalize_cmd(cmd)
ret, stdout = xargs.xargs(cmd, ('1',), env=env) ret, stdout = xargs.xargs(cmd, ('1',), env=env)

Some files were not shown because too many files have changed in this diff.