Mirror of https://github.com/pre-commit/pre-commit.git
Synced 2026-02-19 09:04:41 +04:00

commit da00fa98f9
Merge pull request #978 from pre-commit/all-repos_autofix_pre-commit-autoupdate

    Run pre-commit autoupdate

11 changed files with 125 additions and 101 deletions
@@ -12,7 +12,7 @@ repos:
     -   id: requirements-txt-fixer
     -   id: double-quote-string-fixer
 -   repo: https://gitlab.com/pycqa/flake8
-    rev: 3.7.1
+    rev: 3.7.7
     hooks:
     -   id: flake8
 -   repo: https://github.com/pre-commit/mirrors-autopep8
@@ -20,20 +20,20 @@ repos:
     hooks:
     -   id: autopep8
 -   repo: https://github.com/pre-commit/pre-commit
-    rev: v1.14.2
+    rev: v1.14.4
     hooks:
     -   id: validate_manifest
 -   repo: https://github.com/asottile/pyupgrade
-    rev: v1.11.1
+    rev: v1.12.0
     hooks:
     -   id: pyupgrade
 -   repo: https://github.com/asottile/reorder_python_imports
-    rev: v1.3.5
+    rev: v1.4.0
     hooks:
     -   id: reorder-python-imports
         language_version: python3
 -   repo: https://github.com/asottile/add-trailing-comma
-    rev: v0.7.1
+    rev: v1.0.0
     hooks:
     -   id: add-trailing-comma
 -   repo: meta
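The rev bumps above are exactly what the `pre-commit autoupdate` command named in the commit message produces: it checks each remote repo for its latest tag and rewrites the `rev:` line. A hedged sketch of invoking that real CLI from Python; the working directory and the sample output shown in the comment are illustrative assumptions:

    # Sketch: run the real `pre-commit autoupdate` CLI and show its output.
    import subprocess

    result = subprocess.run(
        ('pre-commit', 'autoupdate'),   # bumps every `rev:` in .pre-commit-config.yaml
        cwd='.',                        # assumed: repository root containing the config
        capture_output=True,
        text=True,
    )
    # Typical lines look like "updating 3.7.1 -> 3.7.7." or "already up to date."
    # (those messages appear in the autoupdate hunk further down).
    print(result.stdout)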
@@ -84,9 +84,11 @@ def _write_new_config_file(path, output):
     new_contents = ordered_dump(output, **C.YAML_DUMP_KWARGS)
 
     lines = original_contents.splitlines(True)
-    rev_line_indices_reversed = list(reversed([
-        i for i, line in enumerate(lines) if REV_LINE_RE.match(line)
-    ]))
+    rev_line_indices_reversed = list(
+        reversed([
+            i for i, line in enumerate(lines) if REV_LINE_RE.match(line)
+        ]),
+    )
 
     for line in new_contents.splitlines(True):
         if REV_LINE_RE.match(line):
@@ -140,9 +142,11 @@ def autoupdate(config_file, store, tags_only, repos=()):
 
         if new_repo_config['rev'] != repo_config['rev']:
             changed = True
-            output.write_line('updating {} -> {}.'.format(
-                repo_config['rev'], new_repo_config['rev'],
-            ))
+            output.write_line(
+                'updating {} -> {}.'.format(
+                    repo_config['rev'], new_repo_config['rev'],
+                ),
+            )
             output_repos.append(new_repo_config)
         else:
             output.write_line('already up to date.')
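The `_write_new_config_file` hunk above works line by line: it dumps the updated config, finds the `rev:` lines in the user's original file with REV_LINE_RE, and splices only those lines in so comments and formatting elsewhere survive. A minimal sketch of that splicing idea, assuming a simple `rev:`-matching pattern (not pre-commit's exact regex or implementation):

    # Sketch: copy only the `rev:` lines from freshly dumped YAML into the
    # original file contents, leaving every other original line untouched.
    import re

    REV_LINE_RE = re.compile(r'^(\s+rev:\s+)(\S+)(\s*)$')  # assumed shape


    def splice_new_revs(original_contents, new_contents):
        lines = original_contents.splitlines(True)           # keep line endings
        rev_indices = [
            i for i, line in enumerate(lines) if REV_LINE_RE.match(line)
        ]
        new_rev_lines = [
            line for line in new_contents.splitlines(True)
            if REV_LINE_RE.match(line)
        ]
        for i, new_line in zip(rev_indices, new_rev_lines):  # pair in order
            lines[i] = new_line
        return ''.join(lines)


    old = 'repos:\n-   repo: https://gitlab.com/pycqa/flake8\n    rev: 3.7.1\n'
    new = 'repos:\n-   repo: https://gitlab.com/pycqa/flake8\n    rev: 3.7.7\n'
    print(splice_new_revs(old, new))  # only the rev line changes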
@@ -53,9 +53,7 @@ def shebang():
     # Homebrew/homebrew-core#35825: be more timid about appropriate `PATH`
     path_choices = [p for p in os.defpath.split(os.pathsep) if p]
     exe_choices = [
-        'python{}'.format('.'.join(
-            str(v) for v in sys.version_info[:i]
-        ))
+        'python{}'.format('.'.join(str(v) for v in sys.version_info[:i]))
         for i in range(3)
     ]
     for path, exe in itertools.product(path_choices, exe_choices):
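The `exe_choices` comprehension in the hunk above builds progressively more specific interpreter names from `sys.version_info`. A worked example of what it yields, using the exact expression from the diff:

    # Worked example of the exe_choices comprehension.
    # On an interpreter where sys.version_info[:2] == (3, 7) it yields:
    #   i=0 -> 'python'      ('.'.join of an empty slice is '')
    #   i=1 -> 'python3'
    #   i=2 -> 'python3.7'
    import sys

    exe_choices = [
        'python{}'.format('.'.join(str(v) for v in sys.version_info[:i]))
        for i in range(3)
    ]
    print(exe_choices)  # e.g. ['python', 'python3', 'python3.7']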
@@ -85,30 +85,36 @@ def _run_single_hook(classifier, hook, args, skips, cols):
     )
 
     if hook.id in skips or hook.alias in skips:
-        output.write(get_hook_message(
-            _hook_msg_start(hook, args.verbose),
-            end_msg=SKIPPED,
-            end_color=color.YELLOW,
-            use_color=args.color,
-            cols=cols,
-        ))
+        output.write(
+            get_hook_message(
+                _hook_msg_start(hook, args.verbose),
+                end_msg=SKIPPED,
+                end_color=color.YELLOW,
+                use_color=args.color,
+                cols=cols,
+            ),
+        )
         return 0
     elif not filenames and not hook.always_run:
-        output.write(get_hook_message(
-            _hook_msg_start(hook, args.verbose),
-            postfix=NO_FILES,
-            end_msg=SKIPPED,
-            end_color=color.TURQUOISE,
-            use_color=args.color,
-            cols=cols,
-        ))
+        output.write(
+            get_hook_message(
+                _hook_msg_start(hook, args.verbose),
+                postfix=NO_FILES,
+                end_msg=SKIPPED,
+                end_color=color.TURQUOISE,
+                use_color=args.color,
+                cols=cols,
+            ),
+        )
         return 0
 
     # Print the hook and the dots first in case the hook takes hella long to
     # run.
-    output.write(get_hook_message(
-        _hook_msg_start(hook, args.verbose), end_len=6, cols=cols,
-    ))
+    output.write(
+        get_hook_message(
+            _hook_msg_start(hook, args.verbose), end_len=6, cols=cols,
+        ),
+    )
     sys.stdout.flush()
 
     diff_before = cmd_output(
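All three call sites above feed `get_hook_message`, which produces the dotted status lines referenced by the "Print the hook and the dots first" comment, padded out to the `cols` width. A minimal sketch of that padding idea only, assuming a fixed-width line; it is not pre-commit's actual `get_hook_message`, which also handles `end_len`, postfixes, and color:

    # Sketch: pad a hook name with dots so name + dots + status is `cols` wide.
    def hook_message(start, end_msg, cols):
        dots = '.' * (cols - len(start) - len(end_msg))
        return start + dots + end_msg


    print(hook_message('Check Yaml', 'Passed', cols=40))
    # Check Yaml........................Passed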
@@ -84,20 +84,24 @@ def get_conflicted_files():
     # If they resolved the merge conflict by choosing a mesh of both sides
     # this will also include the conflicted files
     tree_hash = cmd_output('git', 'write-tree')[1].strip()
-    merge_diff_filenames = zsplit(cmd_output(
-        'git', 'diff', '--name-only', '--no-ext-diff', '-z',
-        '-m', tree_hash, 'HEAD', 'MERGE_HEAD',
-    )[1])
+    merge_diff_filenames = zsplit(
+        cmd_output(
+            'git', 'diff', '--name-only', '--no-ext-diff', '-z',
+            '-m', tree_hash, 'HEAD', 'MERGE_HEAD',
+        )[1],
+    )
     return set(merge_conflict_filenames) | set(merge_diff_filenames)
 
 
 def get_staged_files(cwd=None):
-    return zsplit(cmd_output(
-        'git', 'diff', '--staged', '--name-only', '--no-ext-diff', '-z',
-        # Everything except for D
-        '--diff-filter=ACMRTUXB',
-        cwd=cwd,
-    )[1])
+    return zsplit(
+        cmd_output(
+            'git', 'diff', '--staged', '--name-only', '--no-ext-diff', '-z',
+            # Everything except for D
+            '--diff-filter=ACMRTUXB',
+            cwd=cwd,
+        )[1],
+    )
 
 
 def intent_to_add_files():
@@ -119,10 +123,12 @@ def get_all_files():
 
 
 def get_changed_files(new, old):
-    return zsplit(cmd_output(
-        'git', 'diff', '--name-only', '--no-ext-diff', '-z',
-        '{}...{}'.format(old, new),
-    )[1])
+    return zsplit(
+        cmd_output(
+            'git', 'diff', '--name-only', '--no-ext-diff', '-z',
+            '{}...{}'.format(old, new),
+        )[1],
+    )
 
 
 def head_rev(remote):
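These git helpers all pass `-z`, so git separates paths with NUL bytes instead of newlines, and `zsplit` turns that output into a list of filenames. A hedged sketch of what such a NUL-splitting helper needs to do; pre-commit's actual `zsplit` lives in its util module and may differ in details:

    # Sketch: split `git ... -z` output on NUL bytes, dropping the trailing NUL.
    def zsplit(s):
        s = s.strip('\0')
        if s:
            return s.split('\0')
        else:
            return []          # empty output means no files


    print(zsplit('a.py\0dir/b.py\0'))  # ['a.py', 'dir/b.py']
    print(zsplit(''))                  # []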
@@ -58,9 +58,9 @@ def main(argv=None):
     parser.add_argument('--dest', default='pre_commit/resources')
     args = parser.parse_args(argv)
     for archive_name, repo, ref in REPOS:
-        output.write_line('Making {}.tar.gz for {}@{}'.format(
-            archive_name, repo, ref,
-        ))
+        output.write_line(
+            'Making {}.tar.gz for {}@{}'.format(archive_name, repo, ref),
+        )
         make_archive(archive_name, repo, ref, args.dest)
 
 
@@ -33,41 +33,47 @@ def test_check_type_tag_failures(value):
 @pytest.mark.parametrize(
     ('config_obj', 'expected'), (
         (
-            {'repos': [{
-                'repo': 'git@github.com:pre-commit/pre-commit-hooks',
-                'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
-                'hooks': [{'id': 'pyflakes', 'files': '\\.py$'}],
-            }]},
+            {
+                'repos': [{
+                    'repo': 'git@github.com:pre-commit/pre-commit-hooks',
+                    'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
+                    'hooks': [{'id': 'pyflakes', 'files': '\\.py$'}],
+                }],
+            },
             True,
         ),
         (
-            {'repos': [{
-                'repo': 'git@github.com:pre-commit/pre-commit-hooks',
-                'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
-                'hooks': [
-                    {
-                        'id': 'pyflakes',
-                        'files': '\\.py$',
-                        'args': ['foo', 'bar', 'baz'],
-                    },
-                ],
-            }]},
+            {
+                'repos': [{
+                    'repo': 'git@github.com:pre-commit/pre-commit-hooks',
+                    'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
+                    'hooks': [
+                        {
+                            'id': 'pyflakes',
+                            'files': '\\.py$',
+                            'args': ['foo', 'bar', 'baz'],
+                        },
+                    ],
+                }],
+            },
             True,
         ),
         (
-            {'repos': [{
-                'repo': 'git@github.com:pre-commit/pre-commit-hooks',
-                'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
-                'hooks': [
-                    {
-                        'id': 'pyflakes',
-                        'files': '\\.py$',
-                        # Exclude pattern must be a string
-                        'exclude': 0,
-                        'args': ['foo', 'bar', 'baz'],
-                    },
-                ],
-            }]},
+            {
+                'repos': [{
+                    'repo': 'git@github.com:pre-commit/pre-commit-hooks',
+                    'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
+                    'hooks': [
+                        {
+                            'id': 'pyflakes',
+                            'files': '\\.py$',
+                            # Exclude pattern must be a string
+                            'exclude': 0,
+                            'args': ['foo', 'bar', 'baz'],
+                        },
+                    ],
+                }],
+            },
             False,
         ),
     ),
@@ -33,11 +33,9 @@ def no_warnings(recwarn):
             message.startswith('Not importing directory ') and
             ' missing __init__' in message
         ):
-            warnings.append('{}:{} {}'.format(
-                warning.filename,
-                warning.lineno,
-                message,
-            ))
+            warnings.append(
+                '{}:{} {}'.format(warning.filename, warning.lineno, message),
+            )
     assert not warnings
 
 
@@ -22,9 +22,9 @@ def test_install_rbenv(tempdir_factory):
     # Should be able to activate using our script and access rbenv
     cmd_output(
         'bash', '-c',
-        '. {} && rbenv --help'.format(pipes.quote(prefix.path(
-            'rbenv-default', 'bin', 'activate',
-        ))),
+        '. {} && rbenv --help'.format(
+            pipes.quote(prefix.path('rbenv-default', 'bin', 'activate')),
+        ),
     )
 
 
@@ -36,7 +36,7 @@ def test_install_rbenv_with_version(tempdir_factory):
     # Should be able to activate and use rbenv install
     cmd_output(
         'bash', '-c',
-        '. {} && rbenv install --help'.format(pipes.quote(prefix.path(
-            'rbenv-1.9.3p547', 'bin', 'activate',
-        ))),
+        '. {} && rbenv install --help'.format(
+            pipes.quote(prefix.path('rbenv-1.9.3p547', 'bin', 'activate')),
+        ),
     )
@@ -66,9 +66,9 @@ def test_find_executable_path_ext(in_tmpdir):
     """Windows exports PATHEXT as a list of extensions to automatically add
     to executables when doing PATH searching.
     """
-    exe_path = os.path.abspath(write_executable(
-        '/usr/bin/env sh', filename='run.myext',
-    ))
+    exe_path = os.path.abspath(
+        write_executable('/usr/bin/env sh', filename='run.myext'),
+    )
     env_path = {'PATH': os.path.dirname(exe_path)}
     env_path_ext = dict(env_path, PATHEXT=os.pathsep.join(('.exe', '.myext')))
     assert parse_shebang.find_executable('run') is None
@@ -291,9 +291,11 @@ def test_additional_rust_cli_dependencies_installed(
     # A small rust package with no dependencies.
     config['hooks'][0]['additional_dependencies'] = [dep]
     hook = _get_hook(config, store, 'rust-hook')
-    binaries = os.listdir(hook.prefix.path(
-        helpers.environment_dir(rust.ENVIRONMENT_DIR, C.DEFAULT), 'bin',
-    ))
+    binaries = os.listdir(
+        hook.prefix.path(
+            helpers.environment_dir(rust.ENVIRONMENT_DIR, C.DEFAULT), 'bin',
+        ),
+    )
     # normalize for windows
     binaries = [os.path.splitext(binary)[0] for binary in binaries]
     assert 'shellharden' in binaries
@@ -308,9 +310,11 @@ def test_additional_rust_lib_dependencies_installed(
     deps = ['shellharden:3.1.0']
     config['hooks'][0]['additional_dependencies'] = deps
     hook = _get_hook(config, store, 'rust-hook')
-    binaries = os.listdir(hook.prefix.path(
-        helpers.environment_dir(rust.ENVIRONMENT_DIR, C.DEFAULT), 'bin',
-    ))
+    binaries = os.listdir(
+        hook.prefix.path(
+            helpers.environment_dir(rust.ENVIRONMENT_DIR, C.DEFAULT), 'bin',
+        ),
+    )
     # normalize for windows
     binaries = [os.path.splitext(binary)[0] for binary in binaries]
     assert 'rust-hello-world' in binaries
@@ -507,9 +511,11 @@ def test_additional_golang_dependencies_installed(
     deps = ['github.com/golang/example/hello']
     config['hooks'][0]['additional_dependencies'] = deps
     hook = _get_hook(config, store, 'golang-hook')
-    binaries = os.listdir(hook.prefix.path(
-        helpers.environment_dir(golang.ENVIRONMENT_DIR, C.DEFAULT), 'bin',
-    ))
+    binaries = os.listdir(
+        hook.prefix.path(
+            helpers.environment_dir(golang.ENVIRONMENT_DIR, C.DEFAULT), 'bin',
+        ),
+    )
     # normalize for windows
     binaries = [os.path.splitext(binary)[0] for binary in binaries]
     assert 'hello' in binaries