Merge pull request #610 from pre-commit/config_v2

Enable map configurations (config v2).

commit 3619f830fe
15 changed files with 295 additions and 63 deletions
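
In short: the top level of .pre-commit-config.yaml changes from a bare list of repositories (config v1) to a map with a 'repos' key (config v2). Legacy list-style files keep loading, autoupdate migrates them on the fly, and a new migrate-config command rewrites them in place. A rough sketch of the two shapes once the YAML is loaded (the repo and hook values below are just the ones used elsewhere in this diff):

    # config v1 (legacy): the document root is a list of repository maps
    legacy = [
        {
            'repo': 'https://github.com/pre-commit/pre-commit-hooks',
            'sha': 'v0.9.1',
            'hooks': [{'id': 'trailing-whitespace'}],
        },
    ]
    # config v2: the document root is a map; repositories live under 'repos'
    current = {'repos': legacy}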

@@ -1,3 +1,4 @@
+repos:
 -   repo: https://github.com/pre-commit/pre-commit-hooks.git
     sha: v0.9.1
     hooks:

@@ -2,6 +2,7 @@ from __future__ import absolute_import
 from __future__ import unicode_literals
 
 import argparse
+import collections
 import functools
 
 from aspy.yaml import ordered_load

@@ -125,7 +126,11 @@ CONFIG_REPO_DICT = schema.Map(
         ensure_absent=True,
     ),
 )
-CONFIG_SCHEMA = schema.Array(CONFIG_REPO_DICT)
+CONFIG_SCHEMA = schema.Map(
+    'Config', None,
+
+    schema.RequiredRecurse('repos', schema.Array(CONFIG_REPO_DICT)),
+)
 
 
 def is_local_repo(repo_entry):

@@ -136,10 +141,19 @@ class InvalidConfigError(FatalError):
     pass
 
 
+def ordered_load_normalize_legacy_config(contents):
+    data = ordered_load(contents)
+    if isinstance(data, list):
+        # TODO: Once happy, issue a deprecation warning and instructions
+        return collections.OrderedDict([('repos', data)])
+    else:
+        return data
+
+
 load_config = functools.partial(
     schema.load_from_filename,
     schema=CONFIG_SCHEMA,
-    load_strategy=ordered_load,
+    load_strategy=ordered_load_normalize_legacy_config,
     exc_tp=InvalidConfigError,
 )
 
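
With this change load_config parses files through ordered_load_normalize_legacy_config, so a legacy list-style document is transparently wrapped under a 'repos' key at load time. A minimal sketch of that behaviour (assumes pre-commit is installed so pre_commit.clientlib is importable; the YAML snippet is a placeholder):

    from pre_commit.clientlib import ordered_load_normalize_legacy_config

    legacy_yaml = '-   repo: local\n    hooks:\n    -   id: foo\n'
    data = ordered_load_normalize_legacy_config(legacy_yaml)
    assert list(data) == ['repos']              # the legacy list is wrapped under 'repos'
    assert data['repos'][0]['repo'] == 'local'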

@@ -11,6 +11,7 @@ import pre_commit.constants as C
 from pre_commit import output
 from pre_commit.clientlib import is_local_repo
 from pre_commit.clientlib import load_config
+from pre_commit.commands.migrate_config import migrate_config
 from pre_commit.repository import Repository
 from pre_commit.util import CalledProcessError
 from pre_commit.util import cmd_output

@@ -104,21 +105,22 @@ def _write_new_config_file(path, output):
 def autoupdate(runner, tags_only):
     """Auto-update the pre-commit config to the latest versions of repos."""
     retv = 0
-    output_configs = []
+    retv |= migrate_config(runner, quiet=True)
+    output_repos = []
     changed = False
 
-    input_configs = load_config(runner.config_file_path)
+    input_config = load_config(runner.config_file_path)
 
-    for repo_config in input_configs:
+    for repo_config in input_config['repos']:
         if is_local_repo(repo_config):
-            output_configs.append(repo_config)
+            output_repos.append(repo_config)
             continue
         output.write('Updating {}...'.format(repo_config['repo']))
         try:
             new_repo_config = _update_repo(repo_config, runner, tags_only)
         except RepositoryCannotBeUpdatedError as error:
             output.write_line(error.args[0])
-            output_configs.append(repo_config)
+            output_repos.append(repo_config)
             retv = 1
             continue
 

@@ -127,12 +129,14 @@ def autoupdate(runner, tags_only):
             output.write_line('updating {} -> {}.'.format(
                 repo_config['sha'], new_repo_config['sha'],
             ))
-            output_configs.append(new_repo_config)
+            output_repos.append(new_repo_config)
         else:
             output.write_line('already up to date.')
-            output_configs.append(repo_config)
+            output_repos.append(repo_config)
 
     if changed:
-        _write_new_config_file(runner.config_file_path, output_configs)
+        output_config = input_config.copy()
+        output_config['repos'] = output_repos
+        _write_new_config_file(runner.config_file_path, output_config)
 
     return retv

pre_commit/commands/migrate_config.py (new file, 52 lines)

@@ -0,0 +1,52 @@
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import io
+
+import yaml
+from aspy.yaml import ordered_load
+
+
+def _indent(s):
+    lines = s.splitlines(True)
+    return ''.join(' ' * 4 + line if line.strip() else line for line in lines)
+
+
+def _is_header_line(line):
+    return (line.startswith(('#', '---')) or not line.strip())
+
+
+def migrate_config(runner, quiet=False):
+    retv = 0
+
+    with io.open(runner.config_file_path) as f:
+        contents = f.read()
+
+    # Find the first non-header line
+    lines = contents.splitlines(True)
+    i = 0
+    while _is_header_line(lines[i]):
+        i += 1
+
+    header = ''.join(lines[:i])
+    rest = ''.join(lines[i:])
+
+    if isinstance(ordered_load(contents), list):
+        # If they are using the "default" flow style of yaml, this operation
+        # will yield a valid configuration
+        try:
+            trial_contents = header + 'repos:\n' + rest
+            yaml.load(trial_contents)
+            contents = trial_contents
+        except yaml.YAMLError:
+            contents = header + 'repos:\n' + _indent(rest)
+
+        with io.open(runner.config_file_path, 'w') as f:
+            f.write(contents)
+
+        print('Configuration has been migrated.')
+        retv = 1
+    elif not quiet:
+        print('Configuration is already migrated.')
+
+    return retv
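
migrate_config rewrites an existing configuration file in place: it prefixes a 'repos:' line and re-indents the old body only when the prefixed document would not parse as-is. A usage sketch mirroring the tests added below (assumes pre-commit is installed and a .pre-commit-config.yaml exists in the current directory):

    import pre_commit.constants as C
    from pre_commit.commands.migrate_config import migrate_config
    from pre_commit.runner import Runner

    ret = migrate_config(Runner('.', C.CONFIG_FILE))
    # ret is 1 when the file was rewritten, 0 when it was already in the new format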

@@ -10,8 +10,9 @@ from __future__ import unicode_literals
 SAMPLE_CONFIG = '''\
 # See http://pre-commit.com for more information
 # See http://pre-commit.com/hooks.html for more hooks
+repos:
 -   repo: https://github.com/pre-commit/pre-commit-hooks
-    sha: v0.9.1
+    sha: v0.9.2
     hooks:
     -   id: trailing-whitespace
     -   id: end-of-file-fixer

@@ -14,6 +14,7 @@ from pre_commit.commands.clean import clean
 from pre_commit.commands.install_uninstall import install
 from pre_commit.commands.install_uninstall import install_hooks
 from pre_commit.commands.install_uninstall import uninstall
+from pre_commit.commands.migrate_config import migrate_config
 from pre_commit.commands.run import run
 from pre_commit.commands.sample_config import sample_config
 from pre_commit.error_handler import error_handler

@@ -131,6 +132,13 @@ def main(argv=None):
         ),
     )
 
+    migrate_config_parser = subparsers.add_parser(
+        'migrate-config',
+        help='Migrate list configuration to new map configuration.',
+    )
+    _add_color_option(migrate_config_parser)
+    _add_config_option(migrate_config_parser)
+
     run_parser = subparsers.add_parser('run', help='Run hooks.')
     _add_color_option(run_parser)
     _add_config_option(run_parser)

@@ -217,6 +225,8 @@ def main(argv=None):
         if args.tags_only:
             logger.warning('--tags-only is the default')
         return autoupdate(runner, tags_only=not args.bleeding_edge)
+    elif args.command == 'migrate-config':
+        return migrate_config(runner)
     elif args.command == 'run':
         return run(runner, args)
     elif args.command == 'sample-config':
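
With the parser and dispatch above, the migration is exposed on the command line as pre-commit migrate-config. A hedged sketch of driving the same code path through the programmatic entry point instead (assumes it runs from the root of a git checkout that has a .pre-commit-config.yaml; main() is only partially shown in this diff):

    from pre_commit.main import main

    exit_code = main(['migrate-config'])    # same return convention as migrate_config() above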

@@ -40,11 +40,11 @@ class Runner(object):
     @cached_property
     def repositories(self):
         """Returns a tuple of the configured repositories."""
-        config = load_config(self.config_file_path)
-        repositories = tuple(Repository.create(x, self.store) for x in config)
-        for repository in repositories:
-            repository.require_installed()
-        return repositories
+        repos = load_config(self.config_file_path)['repos']
+        repos = tuple(Repository.create(x, self.store) for x in repos)
+        for repo in repos:
+            repo.require_installed()
+        return repos
 
     def get_hook_path(self, hook_type):
         return os.path.join(self.git_dir, 'hooks', hook_type)

@@ -142,9 +142,13 @@ class Map(collections.namedtuple('Map', ('object_name', 'id_key', 'items'))):
             raise ValidationError('Expected a {} map but got a {}'.format(
                 self.object_name, type(v).__name__,
             ))
-        with validate_context('At {}({}={!r})'.format(
-            self.object_name, self.id_key, v.get(self.id_key, MISSING),
-        )):
+        if self.id_key is None:
+            context = 'At {}()'.format(self.object_name)
+        else:
+            context = 'At {}({}={!r})'.format(
+                self.object_name, self.id_key, v.get(self.id_key, MISSING),
+            )
+        with validate_context(context):
             for item in self.items:
                 item.check(v)
 
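
The new id_key=None branch exists because the top-level CONFIG_SCHEMA is now a Map with no identifying key, so the validation context can no longer interpolate one. A sketch of the resulting top-level check (assumes the vendored schema module is importable as pre_commit.schema, which this diff does not show directly):

    from pre_commit import schema
    from pre_commit.clientlib import CONFIG_SCHEMA

    try:
        schema.validate([], CONFIG_SCHEMA)   # a legacy bare-list document
    except schema.ValidationError:
        pass  # the top level must now be a map; loaders normalize legacy lists instead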

@@ -92,8 +92,9 @@ def make_config_from_repo(repo_path, sha=None, hooks=None, check=True):
     ))
 
     if check:
-        wrapped = validate([config], CONFIG_SCHEMA)
-        config, = apply_defaults(wrapped, CONFIG_SCHEMA)
+        wrapped = validate({'repos': [config]}, CONFIG_SCHEMA)
+        wrapped = apply_defaults(wrapped, CONFIG_SCHEMA)
+        config, = wrapped['repos']
         return config
     else:
         return config

@@ -106,9 +107,9 @@ def read_config(directory, config_file=C.CONFIG_FILE):
 
 
 def write_config(directory, config, config_file=C.CONFIG_FILE):
-    if type(config) is not list:
+    if type(config) is not list and 'repos' not in config:
         assert type(config) is OrderedDict
-        config = [config]
+        config = {'repos': [config]}
     with io.open(os.path.join(directory, config_file), 'w') as outfile:
         outfile.write(ordered_dump(config, **C.YAML_DUMP_KWARGS))
 

@@ -60,15 +60,15 @@ def test_validate_config_main(args, expected_output):
     ('config_obj', 'expected'), (
         ([], False),
         (
-            [{
+            {'repos': [{
                 'repo': 'git@github.com:pre-commit/pre-commit-hooks',
                 'sha': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
                 'hooks': [{'id': 'pyflakes', 'files': '\\.py$'}],
-            }],
+            }]},
             True,
         ),
         (
-            [{
+            {'repos': [{
                 'repo': 'git@github.com:pre-commit/pre-commit-hooks',
                 'sha': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
                 'hooks': [

@@ -78,11 +78,11 @@ def test_validate_config_main(args, expected_output):
                         'args': ['foo', 'bar', 'baz'],
                     },
                 ],
-            }],
+            }]},
             True,
         ),
         (
-            [{
+            {'repos': [{
                 'repo': 'git@github.com:pre-commit/pre-commit-hooks',
                 'sha': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
                 'hooks': [

@@ -94,7 +94,7 @@ def test_validate_config_main(args, expected_output):
                         'args': ['foo', 'bar', 'baz'],
                     },
                 ],
-            }],
+            }]},
             False,
         ),
     ),

@@ -104,29 +104,25 @@ def test_config_valid(config_obj, expected):
     assert ret is expected
 
 
-@pytest.mark.parametrize(
-    'config_obj', (
-        [{
-            'repo': 'local',
-            'sha': 'foo',
-            'hooks': [{
-                'id': 'do_not_commit',
-                'name': 'Block if "DO NOT COMMIT" is found',
-                'entry': 'DO NOT COMMIT',
-                'language': 'pcre',
-                'files': '^(.*)$',
-            }],
-        }],
-    ),
-)
-def test_config_with_local_hooks_definition_fails(config_obj):
+def test_config_with_local_hooks_definition_fails():
+    config_obj = {'repos': [{
+        'repo': 'local',
+        'sha': 'foo',
+        'hooks': [{
+            'id': 'do_not_commit',
+            'name': 'Block if "DO NOT COMMIT" is found',
+            'entry': 'DO NOT COMMIT',
+            'language': 'pcre',
+            'files': '^(.*)$',
+        }],
+    }]}
     with pytest.raises(schema.ValidationError):
         schema.validate(config_obj, CONFIG_SCHEMA)
 
 
 @pytest.mark.parametrize(
     'config_obj', (
-        [{
+        {'repos': [{
             'repo': 'local',
             'hooks': [{
                 'id': 'arg-per-line',

@@ -136,8 +132,8 @@ def test_config_with_local_hooks_definition_fails(config_obj):
                 'files': '',
                 'args': ['hello', 'world'],
             }],
-        }],
-        [{
+        }]},
+        {'repos': [{
             'repo': 'local',
             'hooks': [{
                 'id': 'arg-per-line',

@@ -147,7 +143,7 @@ def test_config_with_local_hooks_definition_fails(config_obj):
                 'files': '',
                 'args': ['hello', 'world'],
             }],
-        }],
+        }]},
     ),
 )
 def test_config_with_local_hooks_definition_passes(config_obj):

@@ -128,6 +128,7 @@ def test_does_not_reformat(
         out_of_date_repo, mock_out_store_directory, in_tmpdir,
 ):
     fmt = (
+        'repos:\n'
         '-   repo: {}\n'
         '    sha: {}  # definitely the version I want!\n'
         '    hooks:\n'

@@ -153,7 +154,7 @@ def test_loses_formatting_when_not_detectable(
     is abandoned.
     """
     config = (
-        '[\n'
+        'repos: [\n'
        '    {{\n'
        '        repo: {}, sha: {},\n'
        '        hooks: [\n'

@@ -171,6 +172,7 @@ def test_loses_formatting_when_not_detectable(
     autoupdate(Runner('.', C.CONFIG_FILE), tags_only=False)
     after = open(C.CONFIG_FILE).read()
     expected = (
+        'repos:\n'
         '-   repo: {}\n'
         '    sha: {}\n'
         '    hooks:\n'

@@ -274,7 +276,7 @@ def test_autoupdate_local_hooks(tempdir_factory):
     assert autoupdate(runner, tags_only=False) == 0
     new_config_writen = load_config(runner.config_file_path)
     assert len(new_config_writen) == 1
-    assert new_config_writen[0] == config
+    assert new_config_writen['repos'][0] == config
 
 
 def test_autoupdate_local_hooks_with_out_of_date_repo(

@@ -284,10 +286,36 @@ def test_autoupdate_local_hooks_with_out_of_date_repo(
         out_of_date_repo.path, sha=out_of_date_repo.original_sha, check=False,
     )
     local_config = config_with_local_hooks()
-    config = [local_config, stale_config]
+    config = {'repos': [local_config, stale_config]}
     write_config('.', config)
     runner = Runner('.', C.CONFIG_FILE)
     assert autoupdate(runner, tags_only=False) == 0
     new_config_writen = load_config(runner.config_file_path)
-    assert len(new_config_writen) == 2
-    assert new_config_writen[0] == local_config
+    assert len(new_config_writen['repos']) == 2
+    assert new_config_writen['repos'][0] == local_config
+
+
+def test_updates_old_format_to_new_format(tmpdir, capsys):
+    cfg = tmpdir.join(C.CONFIG_FILE)
+    cfg.write(
+        '-   repo: local\n'
+        '    hooks:\n'
+        '    -   id: foo\n'
+        '        name: foo\n'
+        '        entry: ./bin/foo.sh\n'
+        '        language: script\n',
+    )
+    ret = autoupdate(Runner(tmpdir.strpath, C.CONFIG_FILE), tags_only=True)
+    assert ret == 1
+    contents = cfg.read()
+    assert contents == (
+        'repos:\n'
+        '-   repo: local\n'
+        '    hooks:\n'
+        '    -   id: foo\n'
+        '        name: foo\n'
+        '        entry: ./bin/foo.sh\n'
+        '        language: script\n'
+    )
+    out, _ = capsys.readouterr()
+    assert out == 'Configuration has been migrated.\n'

tests/commands/migrate_config_test.py (new file, 120 lines)

@@ -0,0 +1,120 @@
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+import pytest
+
+import pre_commit.constants as C
+from pre_commit.commands.migrate_config import _indent
+from pre_commit.commands.migrate_config import migrate_config
+from pre_commit.runner import Runner
+
+
+@pytest.mark.parametrize(
+    ('s', 'expected'),
+    (
+        ('', ''),
+        ('a', '    a'),
+        ('foo\nbar', '    foo\n    bar'),
+        ('foo\n\nbar\n', '    foo\n\n    bar\n'),
+        ('\n\n\n', '\n\n\n'),
+    ),
+)
+def test_indent(s, expected):
+    assert _indent(s) == expected
+
+
+def test_migrate_config_normal_format(tmpdir, capsys):
+    cfg = tmpdir.join(C.CONFIG_FILE)
+    cfg.write(
+        '-   repo: local\n'
+        '    hooks:\n'
+        '    -   id: foo\n'
+        '        name: foo\n'
+        '        entry: ./bin/foo.sh\n'
+        '        language: script\n',
+    )
+    assert migrate_config(Runner(tmpdir.strpath, C.CONFIG_FILE)) == 1
+    out, _ = capsys.readouterr()
+    assert out == 'Configuration has been migrated.\n'
+    contents = cfg.read()
+    assert contents == (
+        'repos:\n'
+        '-   repo: local\n'
+        '    hooks:\n'
+        '    -   id: foo\n'
+        '        name: foo\n'
+        '        entry: ./bin/foo.sh\n'
+        '        language: script\n'
+    )
+
+
+def test_migrate_config_document_marker(tmpdir):
+    cfg = tmpdir.join(C.CONFIG_FILE)
+    cfg.write(
+        '# comment\n'
+        '\n'
+        '---\n'
+        '-   repo: local\n'
+        '    hooks:\n'
+        '    -   id: foo\n'
+        '        name: foo\n'
+        '        entry: ./bin/foo.sh\n'
+        '        language: script\n',
+    )
+    assert migrate_config(Runner(tmpdir.strpath, C.CONFIG_FILE)) == 1
+    contents = cfg.read()
+    assert contents == (
+        '# comment\n'
+        '\n'
+        '---\n'
+        'repos:\n'
+        '-   repo: local\n'
+        '    hooks:\n'
+        '    -   id: foo\n'
+        '        name: foo\n'
+        '        entry: ./bin/foo.sh\n'
+        '        language: script\n'
+    )
+
+
+def test_migrate_config_list_literal(tmpdir):
+    cfg = tmpdir.join(C.CONFIG_FILE)
+    cfg.write(
+        '[{\n'
+        '    repo: local,\n'
+        '    hooks: [{\n'
+        '        id: foo, name: foo, entry: ./bin/foo.sh,\n'
+        '        language: script,\n'
+        '    }]\n'
+        '}]',
+    )
+    assert migrate_config(Runner(tmpdir.strpath, C.CONFIG_FILE)) == 1
+    contents = cfg.read()
+    assert contents == (
+        'repos:\n'
+        '    [{\n'
+        '        repo: local,\n'
+        '        hooks: [{\n'
+        '            id: foo, name: foo, entry: ./bin/foo.sh,\n'
+        '            language: script,\n'
+        '        }]\n'
+        '    }]'
+    )
+
+
+def test_already_migrated_configuration_noop(tmpdir, capsys):
+    contents = (
+        'repos:\n'
+        '-   repo: local\n'
+        '    hooks:\n'
+        '    -   id: foo\n'
+        '        name: foo\n'
+        '        entry: ./bin/foo.sh\n'
+        '        language: script\n'
+    )
+    cfg = tmpdir.join(C.CONFIG_FILE)
+    cfg.write(contents)
+    assert not migrate_config(Runner(tmpdir.strpath, C.CONFIG_FILE))
+    out, _ = capsys.readouterr()
+    assert out == 'Configuration is already migrated.\n'
+    assert cfg.read() == contents

@@ -272,7 +272,7 @@ def test_always_run(
         cap_out, repo_with_passing_hook, mock_out_store_directory,
 ):
     with modify_config() as config:
-        config[0]['hooks'][0]['always_run'] = True
+        config['repos'][0]['hooks'][0]['always_run'] = True
     _test_run(
         cap_out,
         repo_with_passing_hook,

@@ -288,7 +288,7 @@ def test_always_run_alt_config(
 ):
     repo_root = '.'
     config = read_config(repo_root)
-    config[0]['hooks'][0]['always_run'] = True
+    config['repos'][0]['hooks'][0]['always_run'] = True
     alt_config_file = 'alternate_config.yaml'
     add_config_to_repo(repo_root, config, config_file=alt_config_file)
 

@@ -428,7 +428,7 @@ def test_multiple_hooks_same_id(
     with cwd(repo_with_passing_hook):
         # Add bash hook on there again
         with modify_config() as config:
-            config[0]['hooks'].append({'id': 'bash_hook'})
+            config['repos'][0]['hooks'].append({'id': 'bash_hook'})
         stage_a_file()
 
         ret, output = _do_run(cap_out, repo_with_passing_hook, _get_opts())

@@ -455,7 +455,7 @@ def test_stdout_write_bug_py26(
 ):
     with cwd(repo_with_failing_hook):
         with modify_config() as config:
-            config[0]['hooks'][0]['args'] = ['☃']
+            config['repos'][0]['hooks'][0]['args'] = ['☃']
         stage_a_file()
 
         install(Runner(repo_with_failing_hook, C.CONFIG_FILE))

@@ -505,7 +505,7 @@ def test_lots_of_files(mock_out_store_directory, tempdir_factory):
     with cwd(git_path):
         # Override files so we run against them
        with modify_config() as config:
-            config[0]['hooks'][0]['files'] = ''
+            config['repos'][0]['hooks'][0]['files'] = ''
 
         # Write a crap ton of files
         for i in range(400):

@@ -660,7 +660,7 @@ def test_local_hook_fails(
 def modified_config_repo(repo_with_passing_hook):
     with modify_config(repo_with_passing_hook, commit=False) as config:
         # Some minor modification
-        config[0]['hooks'][0]['files'] = ''
+        config['repos'][0]['hooks'][0]['files'] = ''
     yield repo_with_passing_hook
 
 

@@ -721,8 +721,8 @@ def test_pass_filenames(
         expected_out,
 ):
     with modify_config() as config:
-        config[0]['hooks'][0]['pass_filenames'] = pass_filenames
-        config[0]['hooks'][0]['args'] = hook_args
+        config['repos'][0]['hooks'][0]['pass_filenames'] = pass_filenames
+        config['repos'][0]['hooks'][0]['args'] = hook_args
     stage_a_file()
     ret, printed = _do_run(
         cap_out, repo_with_passing_hook, _get_opts(verbose=True),

@@ -11,8 +11,9 @@ def test_sample_config(capsys):
     assert out == '''\
 # See http://pre-commit.com for more information
 # See http://pre-commit.com/hooks.html for more hooks
+repos:
 -   repo: https://github.com/pre-commit/pre-commit-hooks
-    sha: v0.9.1
+    sha: v0.9.2
     hooks:
     -   id: trailing-whitespace
     -   id: end-of-file-fixer

@@ -12,8 +12,8 @@ from testing.auto_namedtuple import auto_namedtuple
 
 
 FNS = (
-    'autoupdate', 'clean', 'install', 'install_hooks', 'run', 'sample_config',
-    'uninstall',
+    'autoupdate', 'clean', 'install', 'install_hooks', 'migrate_config', 'run',
+    'sample_config', 'uninstall',
 )
 CMDS = tuple(fn.replace('_', '-') for fn in FNS)
 