Mirror of https://github.com/pre-commit/pre-commit.git (synced 2026-02-17 08:14:42 +04:00)
Adding support for locally-defined hooks
This commit is contained in:
parent
d97ea30c4b
commit
b68261c720
8 changed files with 241 additions and 40 deletions
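The change introduces a magic repository name: a config entry whose repo is the literal string 'local' defines its hooks inline in .pre-commit-config.yaml instead of pointing at a remote repository pinned to a sha. As a reading aid (not part of the diff), this is the shape of such an entry as the Python structure the validation code sees once the YAML has been loaded; the hook fields are the same ones exercised by the new tests below:

# Sketch of a locally-defined hook entry after the YAML config is loaded.
# 'local' is the magic repo string added by this commit; local entries must
# NOT carry a 'sha' key, and each hook supplies its own id/name/entry/language.
local_entry = {
    'repo': 'local',
    'hooks': [{
        'id': 'do_not_commit',
        'name': 'Block if "DO NOT COMMIT" is found',
        'entry': 'DO NOT COMMIT',
        'language': 'pcre',
        'files': '^(.*)$',
    }],
}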
pre_commit/clientlib/validate_config.py

@@ -6,6 +6,13 @@ from pre_commit.clientlib.validate_base import is_regex_valid
 from pre_commit.errors import FatalError
 
 
+_LOCAL_HOOKS_MAGIC_REPO_STRING = 'local'
+
+
+def is_local_hooks(repo_entry):
+    return repo_entry['repo'] == _LOCAL_HOOKS_MAGIC_REPO_STRING
+
+
 class InvalidConfigError(FatalError):
     pass
 
@@ -53,7 +60,12 @@ def try_regex(repo, hook, value, field_name):
 def validate_config_extra(config):
     for repo in config:
-        if 'sha' not in repo:
+        if is_local_hooks(repo):
+            if 'sha' in repo:
+                raise InvalidConfigError(
+                    '"sha" property provided for local hooks'
+                )
+        elif 'sha' not in repo:
             raise InvalidConfigError(
                 'Missing "sha" field for repository {0}'.format(repo['repo'])
             )
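A small usage sketch (not part of the commit) of the helpers above: is_local_hooks keys off the magic string, and validate_config_extra now rejects a 'sha' on a local entry while still requiring one for every remote entry.

from pre_commit.clientlib.validate_config import is_local_hooks

assert is_local_hooks({'repo': 'local'})
assert not is_local_hooks({'repo': 'git@github.com:pre-commit/pre-commit-hooks'})
# validate_config_extra raises InvalidConfigError with
# '"sha" property provided for local hooks' if a local entry carries a 'sha',
# and still raises 'Missing "sha" field for repository ...' for remote entries.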
pre_commit/commands/run.py

@@ -18,15 +18,6 @@ from pre_commit.util import noop_context
 logger = logging.getLogger('pre_commit')
 
 
-class HookExecutor(object):
-    def __init__(self, hook, invoker):
-        self.hook = hook
-        self._invoker = invoker
-
-    def invoke(self, filenames):
-        return self._invoker(self.hook, filenames)
-
-
 def _get_skips(environ):
     skips = environ.get('SKIP', '')
     return set(skip.strip() for skip in skips.split(',') if skip.strip())
@@ -80,8 +71,7 @@ def get_filenames(args, include_expr, exclude_expr):
     return getter(include_expr, exclude_expr)
 
 
-def _run_single_hook(hook_executor, args, write, skips=frozenset()):
-    hook = hook_executor.hook
+def _run_single_hook(hook, repo, args, write, skips=frozenset()):
     filenames = get_filenames(args, hook['files'], hook['exclude'])
     if hook['id'] in skips:
         _print_user_skipped(hook, write, args)
@@ -95,7 +85,7 @@ def _run_single_hook(hook_executor, args, write, skips=frozenset()):
     write(get_hook_message(_hook_msg_start(hook, args.verbose), end_len=6))
     sys.stdout.flush()
 
-    retcode, stdout, stderr = hook_executor.invoke(filenames)
+    retcode, stdout, stderr = repo.run_hook(hook, filenames)
 
     if retcode != hook['expected_return_value']:
         retcode = 1
@@ -119,19 +109,19 @@ def _run_single_hook(hook_executor, args, write, skips=frozenset()):
     return retcode
 
 
-def _run_hooks(hook_executors, args, write, environ):
+def _run_hooks(repo_hooks, args, write, environ):
     """Actually run the hooks."""
     skips = _get_skips(environ)
     retval = 0
-    for hook_executor in hook_executors:
-        retval |= _run_single_hook(hook_executor, args, write, skips)
+    for repo, hook in repo_hooks:
+        retval |= _run_single_hook(hook, repo, args, write, skips)
     return retval
 
 
-def get_hook_executors(runner):
+def get_repo_hooks(runner):
     for repo in runner.repositories:
-        for _, repo_hook in repo.hooks:
-            yield HookExecutor(repo_hook, repo.run_hook)
+        for _, hook in repo.hooks:
+            yield (repo, hook)
 
 
 def _has_unmerged_paths(runner):
@@ -159,13 +149,13 @@ def run(runner, args, write=sys_stdout_write_wrapper, environ=os.environ):
         ctx = staged_files_only(runner.cmd_runner)
 
     with ctx:
-        hook_executors = list(get_hook_executors(runner))
+        repo_hooks = list(get_repo_hooks(runner))
         if args.hook:
-            hook_executors = [
-                he for he in hook_executors
-                if he.hook['id'] == args.hook
+            repo_hooks = [
+                (repo, hook) for repo, hook in repo_hooks
+                if hook['id'] == args.hook
             ]
-            if not hook_executors:
+            if not repo_hooks:
                 write('No hook with id `{0}`\n'.format(args.hook))
                 return 1
-        return _run_hooks(hook_executors, args, write, environ)
+        return _run_hooks(repo_hooks, args, write, environ)
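The run-side refactor above removes the HookExecutor indirection: hooks now travel as (repository, hook) pairs, so the runner calls repo.run_hook directly and a LocalRepository (added below) plugs into the same path. A minimal sketch of the new plumbing; run_one is a hypothetical helper, not part of the commit:

from pre_commit.commands.run import get_repo_hooks

def run_one(runner, hook_id, filenames):
    # get_repo_hooks yields (repository, hook_dict) pairs for every configured hook.
    for repo, hook in get_repo_hooks(runner):
        if hook['id'] == hook_id:
            return repo.run_hook(hook, filenames)  # -> (retcode, stdout, stderr)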
pre_commit/repository.py

@@ -5,6 +5,10 @@ import shutil
 
 from cached_property import cached_property
 
+from pre_commit import git
+from pre_commit.clientlib.validate_config import is_local_hooks
+from pre_commit.clientlib.validate_manifest import MANIFEST_JSON_SCHEMA
+from pre_commit.jsonschema_extensions import apply_defaults
 from pre_commit.languages.all import languages
 from pre_commit.manifest import Manifest
 from pre_commit.prefixed_command_runner import PrefixedCommandRunner
@@ -21,10 +25,13 @@ class Repository(object):
 
     @classmethod
     def create(cls, config, store):
-        repo_path_getter = store.get_repo_path_getter(
-            config['repo'], config['sha']
-        )
-        return cls(config, repo_path_getter)
+        if is_local_hooks(config):
+            return LocalRepository(config)
+        else:
+            repo_path_getter = store.get_repo_path_getter(
+                config['repo'], config['sha']
+            )
+            return cls(config, repo_path_getter)
 
     @cached_property
     def repo_url(self):
@@ -111,3 +118,28 @@ class Repository(object):
         return languages[hook['language']].run_hook(
             self.cmd_runner, hook, file_args,
         )
+
+
+class LocalRepository(Repository):
+    def __init__(self, repo_config, repo_path_getter=None):
+        repo_path_getter = None
+        super(LocalRepository, self).__init__(repo_config, repo_path_getter)
+
+    @cached_property
+    def hooks(self):
+        return tuple(
+            (hook['id'], apply_defaults(hook, MANIFEST_JSON_SCHEMA['items']))
+            for hook in self.repo_config['hooks']
+        )
+
+    @cached_property
+    def cmd_runner(self):
+        return PrefixedCommandRunner(git.get_root())
+
+    @cached_property
+    def sha(self):
+        raise NotImplementedError
+
+    @cached_property
+    def manifest(self):
+        raise NotImplementedError
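With the dispatch in Repository.create, callers don't change: any code that builds repositories from config transparently gets a LocalRepository for a 'local' entry, whose hooks come straight from the config (with manifest defaults applied) and whose commands run from the current git root. A sketch, essentially the new test_local_repository below with a plain dict in place of OrderedDict:

from pre_commit.repository import Repository

config = {
    'repo': 'local',
    'hooks': [{
        'id': 'do_not_commit',
        'name': 'Block if "DO NOT COMMIT" is found',
        'entry': 'DO NOT COMMIT',
        'language': 'pcre',
        'files': '^(.*)$',
    }],
}
# 'dummy' stands in for the store argument; it is never used for local hooks.
local_repo = Repository.create(config, 'dummy')
assert len(local_repo.hooks) == 1
# Local hooks have no pinned revision and no remote manifest:
# accessing local_repo.sha or local_repo.manifest raises NotImplementedError.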
testing/fixtures.py

@@ -60,12 +60,16 @@ def write_config(directory, config):
         config_file.write(ordered_dump([config], **C.YAML_DUMP_KWARGS))
 
 
-def make_consuming_repo(tmpdir_factory, repo_source):
-    path = make_repo(tmpdir_factory, repo_source)
-    config = make_config_from_repo(path)
-    git_path = git_dir(tmpdir_factory)
+def add_config_to_repo(git_path, config):
     write_config(git_path, config)
     with cwd(git_path):
         cmd_output('git', 'add', C.CONFIG_FILE)
         cmd_output('git', 'commit', '-m', 'Add hooks config')
     return git_path
+
+
+def make_consuming_repo(tmpdir_factory, repo_source):
+    path = make_repo(tmpdir_factory, repo_source)
+    config = make_config_from_repo(path)
+    git_path = git_dir(tmpdir_factory)
+    return add_config_to_repo(git_path, config)
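The fixture split lets tests commit an arbitrary config (such as a local-hooks one) into a repo they already have, rather than always cloning a hook repo first. A hedged sketch of the intended use, mirroring the new runner test below; make_local_hooks_repo is a hypothetical helper, not part of the commit:

from testing.fixtures import add_config_to_repo
from testing.fixtures import git_dir

def make_local_hooks_repo(tmpdir_factory, config):
    # Create an empty git dir and commit the given (local-hooks) config
    # into it as the pre-commit config file.
    git_path = git_dir(tmpdir_factory)
    return add_config_to_repo(git_path, config)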
tests/clientlib/validate_config_test.py

@@ -1,5 +1,6 @@
 from __future__ import unicode_literals
 
+import jsonschema
 import pytest
 
 from pre_commit.clientlib.validate_config import CONFIG_JSON_SCHEMA
@@ -25,7 +26,7 @@ def test_run(input, expected_output):
     assert run(input) == expected_output
 
 
-@pytest.mark.parametrize(('manifest_obj', 'expected'), (
+@pytest.mark.parametrize(('config_obj', 'expected'), (
     ([], False),
     (
         [{
@@ -66,8 +67,8 @@ def test_run(input, expected_output):
         False,
     ),
 ))
-def test_is_valid_according_to_schema(manifest_obj, expected):
-    ret = is_valid_according_to_schema(manifest_obj, CONFIG_JSON_SCHEMA)
+def test_is_valid_according_to_schema(config_obj, expected):
+    ret = is_valid_according_to_schema(config_obj, CONFIG_JSON_SCHEMA)
     assert ret is expected
@@ -121,3 +122,55 @@ def test_config_with_ok_exclude_regex_passes():
         CONFIG_JSON_SCHEMA,
     )
     validate_config_extra(config)
+
+
+@pytest.mark.parametrize('config_obj', (
+    [{
+        'repo': 'local',
+        'sha': 'foo',
+        'hooks': [{
+            'id': 'do_not_commit',
+            'name': 'Block if "DO NOT COMMIT" is found',
+            'entry': 'DO NOT COMMIT',
+            'language': 'pcre',
+            'files': '^(.*)$',
+        }],
+    }],
+))
+def test_config_with_local_hooks_definition_fails(config_obj):
+    with pytest.raises((
+        jsonschema.exceptions.ValidationError, InvalidConfigError
+    )):
+        jsonschema.validate(config_obj, CONFIG_JSON_SCHEMA)
+        config = apply_defaults(config_obj, CONFIG_JSON_SCHEMA)
+        validate_config_extra(config)
+
+
+@pytest.mark.parametrize('config_obj', (
+    [{
+        'repo': 'local',
+        'hooks': [{
+            'id': 'arg-per-line',
+            'name': 'Args per line hook',
+            'entry': 'bin/hook.sh',
+            'language': 'script',
+            'files': '',
+            'args': ['hello', 'world'],
+        }],
+    }],
+    [{
+        'repo': 'local',
+        'hooks': [{
+            'id': 'arg-per-line',
+            'name': 'Args per line hook',
+            'entry': 'bin/hook.sh',
+            'language': 'script',
+            'files': '',
+            'args': ['hello', 'world'],
+        }]
+    }],
+))
+def test_config_with_local_hooks_definition_passes(config_obj):
+    jsonschema.validate(config_obj, CONFIG_JSON_SCHEMA)
+    config = apply_defaults(config_obj, CONFIG_JSON_SCHEMA)
+    validate_config_extra(config)
tests/commands/run_test.py

@@ -14,10 +14,12 @@ from pre_commit.commands.run import _get_skips
 from pre_commit.commands.run import _has_unmerged_paths
 from pre_commit.commands.run import get_changed_files
 from pre_commit.commands.run import run
+from pre_commit.ordereddict import OrderedDict
 from pre_commit.runner import Runner
 from pre_commit.util import cmd_output
 from pre_commit.util import cwd
 from testing.auto_namedtuple import auto_namedtuple
+from testing.fixtures import add_config_to_repo
 from testing.fixtures import make_consuming_repo
@@ -81,7 +83,7 @@ def _test_run(repo, options, expected_outputs, expected_ret, stage):
     stage_a_file()
     args = _get_opts(**options)
     ret, printed = _do_run(repo, args)
-    assert ret == expected_ret
+    assert ret == expected_ret, (ret, expected_ret, printed)
     for expected_output_part in expected_outputs:
         assert expected_output_part in printed
@@ -313,9 +315,7 @@ def test_lots_of_files(mock_out_store_directory, tmpdir_factory):
     git_path = make_consuming_repo(tmpdir_factory, 'python_hooks_repo')
     with cwd(git_path):
         # Override files so we run against them
-        with io.open(
-            '.pre-commit-config.yaml', 'a+',
-        ) as config_file:
+        with io.open('.pre-commit-config.yaml', 'a+') as config_file:
            config_file.write(' files: ""\n')
 
         # Write a crap ton of files
@@ -334,3 +334,66 @@ def test_lots_of_files(mock_out_store_directory, tmpdir_factory):
         stderr=subprocess.STDOUT,
         env=env,
     )
+
+
+def test_local_hook_passes(
+        repo_with_passing_hook, mock_out_store_directory,
+):
+    config = OrderedDict((
+        ('repo', 'local'),
+        ('hooks', (OrderedDict((
+            ('id', 'pylint'),
+            ('name', 'PyLint'),
+            ('entry', 'python -m pylint.__main__'),
+            ('language', 'system'),
+            ('files', r'\.py$'),
+        )), OrderedDict((
+            ('id', 'do_not_commit'),
+            ('name', 'Block if "DO NOT COMMIT" is found'),
+            ('entry', 'DO NOT COMMIT'),
+            ('language', 'pcre'),
+            ('files', '^(.*)$'),
+        ))))
+    ))
+    add_config_to_repo(repo_with_passing_hook, config)
+
+    with io.open('dummy.py', 'w') as staged_file:
+        staged_file.write('"""TODO: something"""\n')
+    cmd_output('git', 'add', 'dummy.py')
+
+    _test_run(
+        repo_with_passing_hook,
+        options={},
+        expected_outputs=[''],
+        expected_ret=0,
+        stage=False
+    )
+
+
+def test_local_hook_fails(
+        repo_with_passing_hook, mock_out_store_directory,
+):
+    config = OrderedDict((
+        ('repo', 'local'),
+        ('hooks', [OrderedDict((
+            ('id', 'no-todo'),
+            ('name', 'No TODO'),
+            ('entry', 'grep -iI todo'),
+            ('expected_return_value', 1),
+            ('language', 'system'),
+            ('files', ''),
+        ))])
+    ))
+    add_config_to_repo(repo_with_passing_hook, config)
+
+    with io.open('dummy.py', 'w') as staged_file:
+        staged_file.write('"""TODO: something"""\n')
+    cmd_output('git', 'add', 'dummy.py')
+
+    _test_run(
+        repo_with_passing_hook,
+        options={},
+        expected_outputs=[''],
+        expected_ret=1,
+        stage=False
+    )
tests/repository_test.py

@@ -12,6 +12,7 @@ from pre_commit.clientlib.validate_config import CONFIG_JSON_SCHEMA
 from pre_commit.clientlib.validate_config import validate_config_extra
 from pre_commit.jsonschema_extensions import apply_defaults
 from pre_commit.languages.python import PythonEnv
+from pre_commit.ordereddict import OrderedDict
 from pre_commit.repository import Repository
 from pre_commit.util import cmd_output
 from pre_commit.util import cwd
@@ -377,3 +378,22 @@ def test_tags_on_repositories(in_tmpdir, tmpdir_factory, store):
     ret = repo_2.run_hook(repo_2.hooks[0][1], ['bar'])
     assert ret[0] == 0
     assert ret[1] == 'bar\nHello World\n'
+
+
+def test_local_repository():
+    config = OrderedDict((
+        ('repo', 'local'),
+        ('hooks', [OrderedDict((
+            ('id', 'do_not_commit'),
+            ('name', 'Block if "DO NOT COMMIT" is found'),
+            ('entry', 'DO NOT COMMIT'),
+            ('language', 'pcre'),
+            ('files', '^(.*)$'),
+        ))])
+    ))
+    local_repo = Repository.create(config, 'dummy')
+    with pytest.raises(NotImplementedError):
+        local_repo.sha
+    with pytest.raises(NotImplementedError):
+        local_repo.manifest
+    assert len(local_repo.hooks) == 1
tests/runner_test.py

@@ -5,8 +5,10 @@ import os
 import os.path
 
 import pre_commit.constants as C
+from pre_commit.ordereddict import OrderedDict
 from pre_commit.runner import Runner
 from pre_commit.util import cwd
+from testing.fixtures import add_config_to_repo
 from testing.fixtures import git_dir
 from testing.fixtures import make_consuming_repo
@@ -52,6 +54,31 @@ def test_repositories(tmpdir_factory, mock_out_store_directory):
     assert len(runner.repositories) == 1
 
 
+def test_local_hooks(tmpdir_factory, mock_out_store_directory):
+    config = OrderedDict((
+        ('repo', 'local'),
+        ('hooks', (OrderedDict((
+            ('id', 'arg-per-line'),
+            ('name', 'Args per line hook'),
+            ('entry', 'bin/hook.sh'),
+            ('language', 'script'),
+            ('files', ''),
+            ('args', ['hello', 'world']),
+        )), OrderedDict((
+            ('id', 'do_not_commit'),
+            ('name', 'Block if "DO NOT COMMIT" is found'),
+            ('entry', 'DO NOT COMMIT'),
+            ('language', 'pcre'),
+            ('files', '^(.*)$'),
+        ))))
+    ))
+    git_path = git_dir(tmpdir_factory)
+    add_config_to_repo(git_path, config)
+    runner = Runner(git_path)
+    assert len(runner.repositories) == 1
+    assert len(runner.repositories[0].hooks) == 2
+
+
 def test_pre_commit_path():
     runner = Runner(os.path.join('foo', 'bar'))
     expected_path = os.path.join('foo', 'bar', '.git', 'hooks', 'pre-commit')