Mirror of https://github.com/pre-commit/pre-commit.git (synced 2026-02-17 00:04:42 +04:00)

Implement Store. pre-commit now installs files to ~/.pre-commit

This commit is contained in:
  parent 26af2cecab
  commit 479eb51873

16 changed files with 457 additions and 234 deletions
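In practical terms, hook environments no longer live in a `.pre-commit-files/` workspace inside the repository being checked; they are installed once into a per-user store and reused. A minimal sketch of the new behaviour, using only the `Store` API added in pre_commit/store.py below (calling `Store()` with no argument is what pre-commit itself does; everything else here is illustrative):

    from pre_commit.store import Store

    store = Store()          # defaults to os.path.join(os.environ['HOME'], '.pre-commit')
    store.require_created()  # creates the directory and drops a README into it
    # store.clone(url, sha) then populates it roughly like this:
    #   ~/.pre-commit/
    #       README               # "This directory is maintained by the pre-commit project."
    #       repoXXXXXX/          # one clone per sha, named by tempfile.mkdtemp(prefix='repo')
    #       <sha> -> repoXXXXXX  # symlink used to detect an already-cloned sha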
.gitignore (vendored)

@@ -4,7 +4,6 @@
 .*.sw[a-z]
 .coverage
 .idea
-.pre-commit-files
 .project
 .pydevproject
 .tox
@@ -56,7 +56,7 @@ class RepositoryCannotBeUpdatedError(RuntimeError):
     pass


-def _update_repository(repo_config):
+def _update_repository(repo_config, runner):
     """Updates a repository to the tip of `master`. If the repository cannot
     be updated because a hook that is configured does not exist in `master`,
     this raises a RepositoryCannotBeUpdatedError

@@ -64,9 +64,9 @@ def _update_repository(repo_config):
     Args:
         repo_config - A config for a repository
     """
-    repo = Repository(repo_config)
+    repo = Repository.create(repo_config, runner.store)

-    with repo.in_checkout():
+    with local.cwd(repo.repo_path_getter.repo_path):
         local['git']['fetch']()
         head_sha = local['git']['rev-parse', 'origin/master']().strip()


@@ -77,11 +77,11 @@ def _update_repository(repo_config):
     # Construct a new config with the head sha
     new_config = OrderedDict(repo_config)
     new_config['sha'] = head_sha
-    new_repo = Repository(new_config)
+    new_repo = Repository.create(new_config, runner.store)

     # See if any of our hooks were deleted with the new commits
     hooks = set(repo.hooks.keys())
-    hooks_missing = hooks - (hooks & set(new_repo.manifest.keys()))
+    hooks_missing = hooks - (hooks & set(new_repo.manifest.hooks.keys()))
     if hooks_missing:
         raise RepositoryCannotBeUpdatedError(
             'Cannot update because the tip of master is missing these hooks:\n'

@@ -106,7 +106,7 @@ def autoupdate(runner):
         sys.stdout.write('Updating {0}...'.format(repo_config['repo']))
         sys.stdout.flush()
         try:
-            new_repo_config = _update_repository(repo_config)
+            new_repo_config = _update_repository(repo_config, runner)
         except RepositoryCannotBeUpdatedError as error:
             print(error.args[0])
             output_configs.append(repo_config)

@@ -135,9 +135,9 @@ def autoupdate(runner):


 def clean(runner):
-    if os.path.exists(runner.hooks_workspace_path):
-        shutil.rmtree(runner.hooks_workspace_path)
-        print('Cleaned {0}.'.format(runner.hooks_workspace_path))
+    if os.path.exists(runner.store.directory):
+        shutil.rmtree(runner.store.directory)
+        print('Cleaned {0}.'.format(runner.store.directory))
     return 0

@@ -1,7 +1,5 @@
 CONFIG_FILE = '.pre-commit-config.yaml'

-HOOKS_WORKSPACE = '.pre-commit-files'
-
 MANIFEST_FILE = 'hooks.yaml'

 YAML_DUMP_KWARGS = {
@@ -1,20 +0,0 @@
-import contextlib
-import os.path
-from plumbum import local
-
-import pre_commit.constants as C
-from pre_commit import git
-
-
-def get_pre_commit_dir_path():
-    return os.path.join(git.get_root(), C.HOOKS_WORKSPACE)
-
-
-@contextlib.contextmanager
-def in_hooks_workspace():
-    """Change into the hooks workspace. If it does not exist create it."""
-    if not os.path.exists(get_pre_commit_dir_path()):
-        local.path(get_pre_commit_dir_path()).mkdir()
-
-    with local.cwd(get_pre_commit_dir_path()):
-        yield
pre_commit/manifest.py (new file, 21 lines)

@@ -0,0 +1,21 @@
+import os.path
+
+import pre_commit.constants as C
+from pre_commit.clientlib.validate_manifest import load_manifest
+from pre_commit.util import cached_property
+
+
+class Manifest(object):
+    def __init__(self, repo_path_getter):
+        self.repo_path_getter = repo_path_getter
+
+    @cached_property
+    def manifest_contents(self):
+        manifest_path = os.path.join(
+            self.repo_path_getter.repo_path, C.MANIFEST_FILE,
+        )
+        return load_manifest(manifest_path)
+
+    @cached_property
+    def hooks(self):
+        return dict((hook['id'], hook) for hook in self.manifest_contents)
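`Manifest` is a thin, lazily-evaluated reader for a hook repository's hooks.yaml. A short sketch of how it is meant to be used, mirroring the fixture in tests/manifest_test.py further down (`repo_url` and `sha` are placeholders for whatever repository and revision you point it at; the store path is illustrative):

    from pre_commit.manifest import Manifest
    from pre_commit.store import Store

    store = Store('/tmp/pre-commit-store')               # illustrative; Store() uses ~/.pre-commit
    getter = store.get_repo_path_getter(repo_url, sha)   # repo_url and sha assumed to be defined
    manifest = Manifest(getter)
    manifest.hooks['bash_hook']   # hooks.yaml is cloned/parsed on first access, then cached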
@@ -1,27 +1,24 @@
-import contextlib
-import logging
 from asottile.ordereddict import OrderedDict
-from plumbum import local

-import pre_commit.constants as C
-from pre_commit import five
-from pre_commit.clientlib.validate_manifest import load_manifest
-from pre_commit.hooks_workspace import in_hooks_workspace
 from pre_commit.languages.all import languages
+from pre_commit.manifest import Manifest
 from pre_commit.prefixed_command_runner import PrefixedCommandRunner
 from pre_commit.util import cached_property
-from pre_commit.util import clean_path_on_failure
-
-
-logger = logging.getLogger('pre_commit')


 class Repository(object):
-    def __init__(self, repo_config):
+    def __init__(self, repo_config, repo_path_getter):
         self.repo_config = repo_config
-        self.__created = False
+        self.repo_path_getter = repo_path_getter
         self.__installed = False

+    @classmethod
+    def create(cls, config, store):
+        repo_path_getter = store.get_repo_path_getter(
+            config['repo'], config['sha']
+        )
+        return cls(config, repo_path_getter)
+
     @cached_property
     def repo_url(self):
         return self.repo_config['repo']

@@ -36,46 +33,22 @@ class Repository(object):

     @cached_property
     def hooks(self):
+        # TODO: merging in manifest dicts is a smell imo
         return OrderedDict(
-            (hook['id'], dict(hook, **self.manifest[hook['id']]))
+            (hook['id'], dict(hook, **self.manifest.hooks[hook['id']]))
             for hook in self.repo_config['hooks']
         )

     @cached_property
     def manifest(self):
-        with self.in_checkout():
-            return dict(
-                (hook['id'], hook)
-                for hook in load_manifest(C.MANIFEST_FILE)
-            )
+        return Manifest(self.repo_path_getter)

     def get_cmd_runner(self, hooks_cmd_runner):
+        # TODO: this effectively throws away the original cmd runner
         return PrefixedCommandRunner.from_command_runner(
-            hooks_cmd_runner, self.sha,
+            hooks_cmd_runner, self.repo_path_getter.repo_path,
         )

-    def require_created(self):
-        if self.__created:
-            return
-
-        self.create()
-        self.__created = True
-
-    def create(self):
-        with in_hooks_workspace():
-            if local.path(self.sha).exists():
-                # Project already exists, no reason to re-create it
-                return
-
-            # Checking out environment for the first time
-            logger.info('Installing environment for {0}.'.format(self.repo_url))
-            logger.info('Once installed this environment will be reused.')
-            logger.info('This may take a few minutes...')
-            with clean_path_on_failure(five.u(local.path(self.sha))):
-                local['git']['clone', '--no-checkout', self.repo_url, self.sha]()
-                with self.in_checkout():
-                    local['git']['checkout', self.sha]()
-
     def require_installed(self, cmd_runner):
         if self.__installed:
             return

@@ -89,7 +62,6 @@ class Repository(object):
         Args:
             cmd_runner - A `PrefixedCommandRunner` bound to the hooks workspace
         """
-        self.require_created()
         repo_cmd_runner = self.get_cmd_runner(cmd_runner)
         for language_name in self.languages:
             language = languages[language_name]

@@ -101,13 +73,6 @@ class Repository(object):
                 continue
             language.install_environment(repo_cmd_runner)

-    @contextlib.contextmanager
-    def in_checkout(self):
-        self.require_created()
-        with in_hooks_workspace():
-            with local.cwd(self.sha):
-                yield
-
     def run_hook(self, cmd_runner, hook_id, file_args):
         """Run a hook.

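`Repository` no longer manages its own checkout: construction goes through `Repository.create`, which asks the store for a lazy `RepoPathGetter`, and the clone itself is deferred to the store. A hedged sketch of the new call sequence (the config dict mirrors the repo_config dicts used in the tests below; the sha is a placeholder that would have to be a real revision of the hook repository):

    from pre_commit.prefixed_command_runner import PrefixedCommandRunner
    from pre_commit.repository import Repository
    from pre_commit.store import Store

    store = Store('/tmp/pre-commit-store')   # illustrative; Store() defaults to ~/.pre-commit
    config = {
        'repo': 'git@github.com:pre-commit/pre-commit-hooks',
        'sha': '<pinned sha>',   # placeholder
        'hooks': [],             # "not a valid config, but it is pretty close", as the old test put it
    }
    repo = Repository.create(config, store)   # wires in store.get_repo_path_getter(repo, sha)
    repo.require_installed(PrefixedCommandRunner(store.directory))  # clone + language envs happen here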
@@ -4,8 +4,8 @@ import os.path
 import pre_commit.constants as C
 from pre_commit import git
 from pre_commit.clientlib.validate_config import load_config
-from pre_commit.prefixed_command_runner import PrefixedCommandRunner
 from pre_commit.repository import Repository
+from pre_commit.store import Store
 from pre_commit.util import cached_property


@@ -27,10 +27,6 @@ class Runner(object):
         os.chdir(root)
         return cls(root)

-    @cached_property
-    def hooks_workspace_path(self):
-        return os.path.join(self.git_root, C.HOOKS_WORKSPACE)
-
     @cached_property
     def config_file_path(self):
         return os.path.join(self.git_root, C.CONFIG_FILE)

@@ -39,7 +35,7 @@ class Runner(object):
     def repositories(self):
         """Returns a tuple of the configured repositories."""
         config = load_config(self.config_file_path)
-        return tuple(Repository(x) for x in config)
+        return tuple(Repository.create(x, self.store) for x in config)

     @cached_property
     def pre_commit_path(self):

@@ -47,4 +43,9 @@ class Runner(object):

     @cached_property
     def cmd_runner(self):
-        return PrefixedCommandRunner(self.hooks_workspace_path)
+        # TODO: remove this and inline runner.store.cmd_runner
+        return self.store.cmd_runner
+
+    @cached_property
+    def store(self):
+        return Store()
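On the Runner side the store is just another cached property, so one `Store` instance is shared by everything a single pre-commit invocation does, and `cmd_runner` is kept only as a thin alias for it. Roughly (the checkout path is illustrative):

    from pre_commit.runner import Runner

    runner = Runner('/path/to/some/git/checkout')  # illustrative path
    runner.store.directory   # ~/.pre-commit unless Store.get_default_directory is mocked out
    runner.cmd_runner        # now just runner.store.cmd_runner, per the TODO above
    # commands.clean(runner) removes runner.store.directory, as shown in the clean() hunk earlier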
pre_commit/store.py (new file, 97 lines)

@@ -0,0 +1,97 @@
+from __future__ import unicode_literals
+
+import io
+import logging
+import os
+import os.path
+import tempfile
+from plumbum import local
+
+from pre_commit.prefixed_command_runner import PrefixedCommandRunner
+from pre_commit.util import cached_property
+from pre_commit.util import clean_path_on_failure
+
+
+logger = logging.getLogger('pre_commit')
+
+
+def _get_default_directory():
+    """Returns the default directory for the Store. This is intentionally
+    underscored to indicate that `Store.get_default_directory` is the intended
+    way to get this information. This is also done so
+    `Store.get_default_directory` can be mocked in tests and
+    `_get_default_directory` can be tested.
+    """
+    return os.path.join(os.environ['HOME'], '.pre-commit')
+
+
+class Store(object):
+    get_default_directory = staticmethod(_get_default_directory)
+
+    class RepoPathGetter(object):
+        def __init__(self, repo, sha, store):
+            self._repo = repo
+            self._sha = sha
+            self._store = store
+
+        @cached_property
+        def repo_path(self):
+            return self._store.clone(self._repo, self._sha)
+
+    def __init__(self, directory=None):
+        if directory is None:
+            directory = self.get_default_directory()
+
+        self.directory = directory
+        self.__created = False
+
+    def _write_readme(self):
+        with io.open(os.path.join(self.directory, 'README'), 'w') as readme:
+            readme.write(
+                'This directory is maintained by the pre-commit project.\n'
+                'Learn more: https://github.com/pre-commit/pre-commit\n'
+            )
+
+    def _create(self):
+        if os.path.exists(self.directory):
+            return
+        os.makedirs(self.directory)
+        self._write_readme()
+
+    def require_created(self):
+        """Require the pre-commit file store to be created."""
+        if self.__created:
+            return
+
+        self._create()
+        self.__created = True
+
+    def clone(self, url, sha):
+        """Clone the given url and checkout the specific sha."""
+        self.require_created()
+
+        # Check if we already exist
+        sha_path = os.path.join(self.directory, sha)
+        if os.path.exists(sha_path):
+            return os.readlink(sha_path)
+
+        logger.info('Installing environment for {0}.'.format(url))
+        logger.info('Once installed this environment will be reused.')
+        logger.info('This may take a few minutes...')
+
+        dir = tempfile.mkdtemp(prefix='repo', dir=self.directory)
+        with clean_path_on_failure(dir):
+            local['git']('clone', '--no-checkout', url, dir)
+            with local.cwd(dir):
+                local['git']('checkout', sha)
+
+        # Make a symlink from sha->repo
+        os.symlink(dir, sha_path)
+        return dir
+
+    def get_repo_path_getter(self, repo, sha):
+        return self.RepoPathGetter(repo, sha, self)
+
+    @cached_property
+    def cmd_runner(self):
+        return PrefixedCommandRunner(self.directory)
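The heavy lifting is in `Store.clone`: it clones into a `repo*` temp directory under the store, checks out the requested sha, and records a `sha -> directory` symlink so a second request for the same sha is answered with a readlink. `RepoPathGetter` wraps that in a `cached_property`, deferring the clone until something actually asks for the path. A hedged end-to-end sketch against a throwaway local repository (it assumes git is configured well enough to commit; directory names are whatever tempfile hands back):

    import os.path
    import tempfile
    from plumbum import local
    from pre_commit.store import Store

    # Throwaway "upstream" repository, much like tests/store_test.py builds below.
    src = tempfile.mkdtemp()
    with local.cwd(src):
        local['git']('init', '.')
        local['git']('commit', '--allow-empty', '-m', 'foo')
        sha = local['git']('rev-parse', 'HEAD').strip()

    store = Store(os.path.join(tempfile.mkdtemp(), '.pre-commit'))
    getter = store.get_repo_path_getter(src, sha)
    first = getter.repo_path        # first access triggers store.clone(src, sha)
    second = store.clone(src, sha)  # the sha symlink already exists, so this just readlinks
    assert first == second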
@@ -20,6 +20,11 @@ from testing.util import get_head_sha
 from testing.util import get_resource_path


+@pytest.yield_fixture
+def runner_with_mocked_store(mock_out_store_directory):
+    yield Runner('/')
+
+
 def test_install_pre_commit(empty_git_dir):
     runner = Runner(empty_git_dir)
     ret = commands.install(runner)

@@ -70,13 +75,15 @@ def up_to_date_repo(python_hooks_repo):
     )


-def test_up_to_date_repo(up_to_date_repo):
+def test_up_to_date_repo(up_to_date_repo, runner_with_mocked_store):
     input_sha = up_to_date_repo.repo_config['sha']
-    ret = commands._update_repository(up_to_date_repo.repo_config)
+    ret = commands._update_repository(
+        up_to_date_repo.repo_config, runner_with_mocked_store,
+    )
     assert ret['sha'] == input_sha


-def test_autoupdate_up_to_date_repo(up_to_date_repo):
+def test_autoupdate_up_to_date_repo(up_to_date_repo, mock_out_store_directory):
     before = open(C.CONFIG_FILE).read()
     runner = Runner(up_to_date_repo.python_hooks_repo)
     ret = commands.autoupdate(runner)

@@ -110,18 +117,22 @@ def out_of_date_repo(python_hooks_repo):
     )


-def test_out_of_date_repo(out_of_date_repo):
-    ret = commands._update_repository(out_of_date_repo.repo_config)
+def test_out_of_date_repo(out_of_date_repo, runner_with_mocked_store):
+    ret = commands._update_repository(
+        out_of_date_repo.repo_config, runner_with_mocked_store,
+    )
     assert ret['sha'] == out_of_date_repo.head_sha


-def test_removes_defaults(out_of_date_repo):
-    ret = commands._update_repository(out_of_date_repo.repo_config)
+def test_removes_defaults(out_of_date_repo, runner_with_mocked_store):
+    ret = commands._update_repository(
+        out_of_date_repo.repo_config, runner_with_mocked_store,
+    )
     assert 'args' not in ret['hooks'][0]
     assert 'expected_return_value' not in ret['hooks'][0]


-def test_autoupdate_out_of_date_repo(out_of_date_repo):
+def test_autoupdate_out_of_date_repo(out_of_date_repo, mock_out_store_directory):
     before = open(C.CONFIG_FILE).read()
     runner = Runner(out_of_date_repo.python_hooks_repo)
     ret = commands.autoupdate(runner)

@@ -156,12 +167,14 @@ def hook_disappearing_repo(python_hooks_repo):
     )


-def test_hook_disppearing_repo_raises(hook_disappearing_repo):
+def test_hook_disppearing_repo_raises(hook_disappearing_repo, runner_with_mocked_store):
     with pytest.raises(commands.RepositoryCannotBeUpdatedError):
-        commands._update_repository(hook_disappearing_repo.repo_config)
+        commands._update_repository(
+            hook_disappearing_repo.repo_config, runner_with_mocked_store,
+        )


-def test_autoupdate_hook_disappearing_repo(hook_disappearing_repo):
+def test_autoupdate_hook_disappearing_repo(hook_disappearing_repo, mock_out_store_directory):
     before = open(C.CONFIG_FILE).read()
     runner = Runner(hook_disappearing_repo.python_hooks_repo)
     ret = commands.autoupdate(runner)

@@ -170,16 +183,18 @@ def test_autoupdate_hook_disappearing_repo(hook_disappearing_repo):
     assert before == after


-def test_clean(empty_git_dir):
-    os.mkdir(C.HOOKS_WORKSPACE)
-    commands.clean(Runner(empty_git_dir))
-    assert not os.path.exists(C.HOOKS_WORKSPACE)
+def test_clean(runner_with_mocked_store):
+    assert os.path.exists(runner_with_mocked_store.store.directory)
+    commands.clean(runner_with_mocked_store)
+    assert not os.path.exists(runner_with_mocked_store.store.directory)


-def test_clean_empty(empty_git_dir):
-    assert not os.path.exists(C.HOOKS_WORKSPACE)
-    commands.clean(Runner(empty_git_dir))
-    assert not os.path.exists(C.HOOKS_WORKSPACE)
+def test_clean_empty(runner_with_mocked_store):
+    """Make sure clean succeeds when we the directory doesn't exist."""
+    shutil.rmtree(runner_with_mocked_store.store.directory)
+    assert not os.path.exists(runner_with_mocked_store.store.directory)
+    commands.clean(runner_with_mocked_store)
+    assert not os.path.exists(runner_with_mocked_store.store.directory)


 def stage_a_file():

@@ -219,7 +234,7 @@ def _test_run(repo, options, expected_outputs, expected_ret, stage):
         assert expected_output_part in printed


-def test_run_all_hooks_failing(repo_with_failing_hook):
+def test_run_all_hooks_failing(repo_with_failing_hook, mock_out_store_directory):
     _test_run(
         repo_with_failing_hook,
         {},

@@ -247,7 +262,7 @@ def test_run_all_hooks_failing(repo_with_failing_hook):
         ({}, ('Bash hook', '(no files to check)', 'Skipped'), 0, False),
     )
 )
-def test_run(repo_with_passing_hook, options, outputs, expected_ret, stage):
+def test_run(repo_with_passing_hook, options, outputs, expected_ret, stage, mock_out_store_directory):
     _test_run(repo_with_passing_hook, options, outputs, expected_ret, stage)


@@ -260,7 +275,7 @@ def test_run(repo_with_passing_hook, options, outputs, expected_ret, stage):
         (False, False, True),
     ),
 )
-def test_no_stash(repo_with_passing_hook, no_stash, all_files, expect_stash):
+def test_no_stash(repo_with_passing_hook, no_stash, all_files, expect_stash, mock_out_store_directory):
     stage_a_file()
     # Make unstaged changes
     with open('foo.py', 'w') as foo_file:

@@ -283,13 +298,13 @@ def test_has_unmerged_paths(output, expected):
     assert commands._has_unmerged_paths(mock_runner) is expected


-def test_merge_conflict(in_merge_conflict):
+def test_merge_conflict(in_merge_conflict, mock_out_store_directory):
     ret, printed = _do_run(in_merge_conflict, _get_opts())
     assert ret == 1
     assert 'Unmerged files. Resolve before committing.' in printed


-def test_merge_conflict_modified(in_merge_conflict):
+def test_merge_conflict_modified(in_merge_conflict, mock_out_store_directory):
     # Touch another file so we have unstaged non-conflicting things
     assert os.path.exists('dummy')
     with open('dummy', 'w') as dummy_file:

@@ -300,7 +315,7 @@ def test_merge_conflict_modified(in_merge_conflict):
     assert 'Unmerged files. Resolve before committing.' in printed


-def test_merge_conflict_resolved(in_merge_conflict):
+def test_merge_conflict_resolved(in_merge_conflict, mock_out_store_directory):
     local['git']['add', '.']()
     ret, printed = _do_run(in_merge_conflict, _get_opts())
     for msg in ('Checking merge-conflict files only.', 'Bash hook', 'Passed'):

@@ -324,7 +339,7 @@ def test_get_skips(environ, expected_output):
     assert ret == expected_output


-def test_skip_hook(repo_with_passing_hook):
+def test_skip_hook(repo_with_passing_hook, mock_out_store_directory):
     ret, printed = _do_run(
         repo_with_passing_hook, _get_opts(), {'SKIP': 'bash_hook'},
     )

@@ -332,11 +347,11 @@ def test_skip_hook(repo_with_passing_hook):
         assert msg in printed


-def test_hook_id_not_in_non_verbose_output(repo_with_passing_hook):
+def test_hook_id_not_in_non_verbose_output(repo_with_passing_hook, mock_out_store_directory):
     ret, printed = _do_run(repo_with_passing_hook, _get_opts(verbose=False))
     assert '[bash_hook]' not in printed


-def test_hook_id_in_verbose_output(repo_with_passing_hook):
+def test_hook_id_in_verbose_output(repo_with_passing_hook, mock_out_store_directory):
     ret, printed = _do_run(repo_with_passing_hook, _get_opts(verbose=True))
     assert '[bash_hook] Bash hook' in printed
@@ -1,5 +1,6 @@
 from __future__ import absolute_import

+import mock
 import os
 import os.path
 import pytest

@@ -8,18 +9,36 @@ import yaml
 from plumbum import local

 import pre_commit.constants as C
+from pre_commit import five
 from pre_commit.clientlib.validate_config import CONFIG_JSON_SCHEMA
 from pre_commit.clientlib.validate_config import validate_config_extra
 from pre_commit.jsonschema_extensions import apply_defaults
+from pre_commit.store import Store
 from testing.util import copy_tree_to_path
 from testing.util import get_head_sha
 from testing.util import get_resource_path


 @pytest.yield_fixture
-def in_tmpdir(tmpdir):
-    with local.cwd(tmpdir.strpath):
-        yield tmpdir.strpath
+def tmpdir_factory(tmpdir):
+    class TmpdirFactory(object):
+        def __init__(self):
+            self.tmpdir_count = 0
+
+        def get(self):
+            path = os.path.join(tmpdir.strpath, five.text(self.tmpdir_count))
+            self.tmpdir_count += 1
+            os.mkdir(path)
+            return path
+
+    yield TmpdirFactory()
+
+
+@pytest.yield_fixture
+def in_tmpdir(tmpdir_factory):
+    path = tmpdir_factory.get()
+    with local.cwd(path):
+        yield path


 @pytest.yield_fixture

@@ -154,3 +173,36 @@ def in_merge_conflict(repo_with_passing_hook):
     local['git']['commit', '-m', 'conflict_file']()
     local['git']['merge', 'foo'](retcode=None)
     yield os.path.join(repo_with_passing_hook, 'foo')
+
+
+@pytest.yield_fixture(scope='session', autouse=True)
+def dont_write_to_home_directory():
+    """pre_commit.store.Store will by default write to the home directory
+    We'll mock out `Store.get_default_directory` to raise invariantly so we
+    don't construct a `Store` object that writes to our home directory.
+    """
+    class YouForgotToExplicitlyChooseAStoreDirectory(AssertionError):
+        pass
+
+    with mock.patch.object(
+        Store,
+        'get_default_directory',
+        side_effect=YouForgotToExplicitlyChooseAStoreDirectory,
+    ):
+        yield
+
+
+@pytest.yield_fixture
+def mock_out_store_directory(tmpdir_factory):
+    tmpdir = tmpdir_factory.get()
+    with mock.patch.object(
+        Store,
+        'get_default_directory',
+        return_value=tmpdir,
+    ):
+        yield tmpdir
+
+
+@pytest.yield_fixture
+def store(tmpdir_factory):
+    yield Store(os.path.join(tmpdir_factory.get(), '.pre-commit'))
tests/manifest_test.py (new file, 34 lines)

@@ -0,0 +1,34 @@
+import pytest
+
+from pre_commit.manifest import Manifest
+from testing.util import get_head_sha
+
+
+@pytest.yield_fixture
+def manifest(store, script_hooks_repo):
+    head_sha = get_head_sha(script_hooks_repo)
+    repo_path_getter = store.get_repo_path_getter(script_hooks_repo, head_sha)
+    yield Manifest(repo_path_getter)
+
+
+def test_manifest_contents(manifest):
+    # Should just retrieve the manifest contents
+    assert manifest.manifest_contents == [{
+        'description': '',
+        'entry': 'bin/hook.sh',
+        'expected_return_value': 0,
+        'id': 'bash_hook',
+        'language': 'script',
+        'name': 'Bash hook',
+    }]
+
+
+def test_hooks(manifest):
+    assert manifest.hooks['bash_hook'] == {
+        'description': '',
+        'entry': 'bin/hook.sh',
+        'expected_return_value': 0,
+        'id': 'bash_hook',
+        'language': 'script',
+        'name': 'Bash hook',
+    }
@@ -73,6 +73,7 @@ PATH_TESTS = (
     ('foo/bar', '../baz', 'foo/baz'),
     ('./', 'bar', 'bar'),
     ('./', '', '.'),
+    ('/tmp/foo', '/tmp/bar', '/tmp/bar'),
 )

@@ -1,58 +1,26 @@
-import mock
-import os
+import os.path
 import pytest
 from plumbum import local

-import pre_commit.constants as C
-from pre_commit import repository
 from pre_commit.clientlib.validate_config import CONFIG_JSON_SCHEMA
 from pre_commit.clientlib.validate_config import validate_config_extra
 from pre_commit.jsonschema_extensions import apply_defaults
 from pre_commit.prefixed_command_runner import PrefixedCommandRunner
 from pre_commit.repository import Repository
-from testing.util import get_head_sha
-
-
-@pytest.fixture
-def dummy_repo_config(dummy_git_repo):
-    # This is not a valid config, but it is pretty close
-    return {
-        'repo': dummy_git_repo,
-        'sha': get_head_sha(dummy_git_repo),
-        'hooks': [],
-    }
-
-
 @pytest.mark.integration
-def test_create_repo_in_env(dummy_repo_config, dummy_git_repo):
-    repo = Repository(dummy_repo_config)
-    repo.create()
-
-    assert os.path.exists(
-        os.path.join(dummy_git_repo, C.HOOKS_WORKSPACE, repo.sha),
-    )
+def test_install_python_repo_in_env(config_for_python_hooks_repo, store):
+    repo = Repository.create(config_for_python_hooks_repo, store)
+    repo.install(PrefixedCommandRunner(store.directory))
+    assert os.path.exists(os.path.join(store.directory, repo.sha, 'py_env'))


 @pytest.mark.integration
-def test_install_python_repo_in_env(config_for_python_hooks_repo):
-    repo = Repository(config_for_python_hooks_repo)
-    repo.install(PrefixedCommandRunner(C.HOOKS_WORKSPACE))
-
-    assert os.path.exists(
-        os.path.join(
-            repo.repo_url,
-            C.HOOKS_WORKSPACE,
-            repo.sha,
-            'py_env',
-        ),
-    )
-
-
-@pytest.mark.integration
-def test_run_a_python_hook(config_for_python_hooks_repo):
-    repo = Repository(config_for_python_hooks_repo)
+def test_run_a_python_hook(config_for_python_hooks_repo, store):
+    repo = Repository.create(config_for_python_hooks_repo, store)
     ret = repo.run_hook(
-        PrefixedCommandRunner(C.HOOKS_WORKSPACE), 'foo', ['/dev/null'],
+        PrefixedCommandRunner(store.directory), 'foo', ['/dev/null'],
     )

     assert ret[0] == 0

@@ -60,21 +28,21 @@ def test_run_a_python_hook(config_for_python_hooks_repo):


 @pytest.mark.integration
-def test_run_a_hook_lots_of_files(config_for_python_hooks_repo):
-    repo = Repository(config_for_python_hooks_repo)
+def test_lots_of_files(config_for_python_hooks_repo, store):
+    repo = Repository.create(config_for_python_hooks_repo, store)
     ret = repo.run_hook(
-        PrefixedCommandRunner(C.HOOKS_WORKSPACE), 'foo', ['/dev/null'] * 15000,
+        PrefixedCommandRunner(store.directory), 'foo', ['/dev/null'] * 15000,
     )

     assert ret[0] == 0


 @pytest.mark.integration
-def test_cwd_of_hook(config_for_prints_cwd_repo):
+def test_cwd_of_hook(config_for_prints_cwd_repo, store):
     # Note: this doubles as a test for `system` hooks
-    repo = Repository(config_for_prints_cwd_repo)
+    repo = Repository.create(config_for_prints_cwd_repo, store)
     ret = repo.run_hook(
-        PrefixedCommandRunner(C.HOOKS_WORKSPACE), 'prints_cwd', [],
+        PrefixedCommandRunner(store.directory), 'prints_cwd', [],
     )

     assert ret[0] == 0

@@ -86,19 +54,19 @@ def test_cwd_of_hook(config_for_prints_cwd_repo):
     reason="TODO: make this test not super slow",
 )
 @pytest.mark.integration
-def test_run_a_node_hook(config_for_node_hooks_repo):
-    repo = Repository(config_for_node_hooks_repo)
-    ret = repo.run_hook(PrefixedCommandRunner(C.HOOKS_WORKSPACE), 'foo', [])
+def test_run_a_node_hook(config_for_node_hooks_repo, store):
+    repo = Repository.create(config_for_node_hooks_repo, store)
+    ret = repo.run_hook(PrefixedCommandRunner(store.directory), 'foo', [])

     assert ret[0] == 0
     assert ret[1] == 'Hello World\n'


 @pytest.mark.integration
-def test_run_a_script_hook(config_for_script_hooks_repo):
-    repo = Repository(config_for_script_hooks_repo)
+def test_run_a_script_hook(config_for_script_hooks_repo, store):
+    repo = Repository.create(config_for_script_hooks_repo, store)
     ret = repo.run_hook(
-        PrefixedCommandRunner(C.HOOKS_WORKSPACE), 'bash_hook', ['bar'],
+        PrefixedCommandRunner(store.directory), 'bash_hook', ['bar'],
     )

     assert ret[0] == 0

@@ -121,60 +89,37 @@ def mock_repo_config():


 def test_repo_url(mock_repo_config):
-    repo = Repository(mock_repo_config)
+    repo = Repository(mock_repo_config, None)
     assert repo.repo_url == 'git@github.com:pre-commit/pre-commit-hooks'


 def test_sha(mock_repo_config):
-    repo = Repository(mock_repo_config)
+    repo = Repository(mock_repo_config, None)
     assert repo.sha == '5e713f8878b7d100c0e059f8cc34be4fc2e8f897'


 @pytest.mark.integration
-def test_languages(config_for_python_hooks_repo):
-    repo = Repository(config_for_python_hooks_repo)
+def test_languages(config_for_python_hooks_repo, store):
+    repo = Repository.create(config_for_python_hooks_repo, store)
     assert repo.languages == set(['python'])


-@pytest.yield_fixture
-def logger_mock():
-    with mock.patch.object(
-        repository.logger, 'info', autospec=True,
-    ) as info_mock:
-        yield info_mock
-
-
-def test_prints_while_creating(config_for_python_hooks_repo, logger_mock):
-    repo = Repository(config_for_python_hooks_repo)
-    repo.require_created()
-    logger_mock.assert_called_with('This may take a few minutes...')
-    logger_mock.reset_mock()
-    # Reinstall with same repo should not trigger another install
-    repo.require_created()
-    assert logger_mock.call_count == 0
-    # Reinstall on another run should not trigger another install
-    repo = Repository(config_for_python_hooks_repo)
-    repo.require_created()
-    assert logger_mock.call_count == 0
-
-
-def test_reinstall(config_for_python_hooks_repo):
-    repo = Repository(config_for_python_hooks_repo)
-    repo.require_installed(PrefixedCommandRunner(C.HOOKS_WORKSPACE))
+def test_reinstall(config_for_python_hooks_repo, store):
+    repo = Repository.create(config_for_python_hooks_repo, store)
+    repo.require_installed(PrefixedCommandRunner(store.directory))
     # Reinstall with same repo should not trigger another install
     # TODO: how to assert this?
-    repo.require_installed(PrefixedCommandRunner(C.HOOKS_WORKSPACE))
+    repo.require_installed(PrefixedCommandRunner(store.directory))
     # Reinstall on another run should not trigger another install
     # TODO: how to assert this?
-    repo = Repository(config_for_python_hooks_repo)
-    repo.require_installed(PrefixedCommandRunner(C.HOOKS_WORKSPACE))
+    repo = Repository.create(config_for_python_hooks_repo, store)
+    repo.require_installed(PrefixedCommandRunner(store.directory))


-@pytest.mark.xfail
 @pytest.mark.integration
-def test_really_long_file_paths(config_for_python_hooks_repo):
+def test_really_long_file_paths(config_for_python_hooks_repo, store):
     path = 'really_long' * 10
     local['git']['init', path]()
     with local.cwd(path):
-        repo = Repository(config_for_python_hooks_repo)
-        repo.require_installed(PrefixedCommandRunner(C.HOOKS_WORKSPACE))
+        repo = Repository.create(config_for_python_hooks_repo, store)
+        repo.require_installed(PrefixedCommandRunner(store.directory))
@@ -33,19 +33,14 @@ def test_changes_to_root_of_git_dir(git_dir_with_directory):
     assert os.getcwd() == git_dir_with_directory


-def test_hooks_workspace_path():
-    runner = Runner('foo/bar')
-    expected_path = os.path.join('foo/bar', C.HOOKS_WORKSPACE)
-    assert runner.hooks_workspace_path == expected_path
-
-
 def test_config_file_path():
     runner = Runner('foo/bar')
     expected_path = os.path.join('foo/bar', C.CONFIG_FILE)
     assert runner.config_file_path == expected_path


-def test_repositories(consumer_repo):
+def test_repositories(consumer_repo, mock_out_store_directory):
+    # TODO: make this not have external deps
     runner = Runner(consumer_repo)
     assert len(runner.repositories) == 2
     assert [repo.repo_url for repo in runner.repositories] == [

@@ -60,7 +55,7 @@ def test_pre_commit_path():
     assert runner.pre_commit_path == expected_path


-def test_cmd_runner():
+def test_cmd_runner(mock_out_store_directory):
     runner = Runner('foo/bar')
     ret = runner.cmd_runner
-    assert ret.prefix_dir == os.path.join('foo/bar', C.HOOKS_WORKSPACE) + '/'
+    assert ret.prefix_dir == os.path.join(mock_out_store_directory) + '/'
@@ -8,7 +8,6 @@ import pytest
 import shutil
 from plumbum import local

-import pre_commit.constants as C
 from pre_commit.prefixed_command_runner import PrefixedCommandRunner
 from pre_commit.staged_files_only import staged_files_only
 from testing.auto_namedtuple import auto_namedtuple

@@ -23,17 +22,8 @@ def get_short_git_status():
     return dict(reversed(line.split()) for line in git_status.splitlines())


-def write_gitignore():
-    with open('.gitignore', 'w') as gitignore_file:
-        gitignore_file.write(C.HOOKS_WORKSPACE + '\n')
-
-
 @pytest.yield_fixture
 def foo_staged(empty_git_dir):
-    write_gitignore()
-    local['git']['add', '.']()
-    local['git']['commit', '-m', 'add gitignore']()
-
     with io.open('foo', 'w') as foo_file:
         foo_file.write(FOO_CONTENTS)
     local['git']['add', 'foo']()

@@ -41,9 +31,9 @@ def foo_staged(empty_git_dir):
     yield auto_namedtuple(path=empty_git_dir, foo_filename=foo_filename)


-@pytest.fixture
-def cmd_runner():
-    return PrefixedCommandRunner(C.HOOKS_WORKSPACE)
+@pytest.yield_fixture
+def cmd_runner(tmpdir_factory):
+    yield PrefixedCommandRunner(tmpdir_factory.get())


 def _test_foo_state(path, foo_contents=FOO_CONTENTS, status='A'):

@@ -113,10 +103,6 @@ def test_foo_both_modify_conflicting(foo_staged, cmd_runner):

 @pytest.yield_fixture
 def img_staged(empty_git_dir):
-    write_gitignore()
-    local['git']['add', '.']()
-    local['git']['commit', '-m', 'add gitignore']()
-
     img_filename = os.path.join(empty_git_dir, 'img.jpg')
     shutil.copy(get_resource_path('img1.jpg'), img_filename)
     local['git']['add', 'img.jpg']()
tests/store_test.py (new file, 134 lines)

@@ -0,0 +1,134 @@
+import io
+import mock
+import os
+import os.path
+import pytest
+import shutil
+from plumbum import local
+
+from pre_commit import five
+from pre_commit.store import _get_default_directory
+from pre_commit.store import logger
+from pre_commit.store import Store
+from testing.util import get_head_sha
+
+
+def test_our_session_fixture_works():
+    """There's a session fixture which makes `Store` invariantly raise to
+    prevent writing to the home directory.
+    """
+    with pytest.raises(AssertionError):
+        Store()
+
+
+def test_get_default_directory_defaults_to_home():
+    # Not we use the module level one which is not mocked
+    ret = _get_default_directory()
+    assert ret == os.path.join(os.environ['HOME'], '.pre-commit')
+
+
+@pytest.yield_fixture
+def store(tmpdir_factory):
+    yield Store(os.path.join(tmpdir_factory.get(), '.pre-commit'))
+
+
+def test_store_require_created(store):
+    assert not os.path.exists(store.directory)
+    store.require_created()
+    # Should create the store directory
+    assert os.path.exists(store.directory)
+    # Should create a README file indicating what the directory is about
+    with io.open(os.path.join(store.directory, 'README'), 'r') as readme_file:
+        readme_contents = readme_file.read()
+    for text_line in (
+        'This directory is maintained by the pre-commit project.',
+        'Learn more: https://github.com/pre-commit/pre-commit',
+    ):
+        assert text_line in readme_contents
+
+
+def test_store_require_created_does_not_create_twice(store):
+    assert not os.path.exists(store.directory)
+    store.require_created()
+    # We intentionally delete the directory here so we can figure out if it
+    # calls it again.
+    shutil.rmtree(store.directory)
+    assert not os.path.exists(store.directory)
+    # Call require_created, this should not trigger a call to create
+    store.require_created()
+    assert not os.path.exists(store.directory)
+
+
+def test_does_not_recreate_if_directory_already_exists(store):
+    assert not os.path.exists(store.directory)
+    # We manually create the directory.
+    # Note: we're intentionally leaving out the README file. This is so we can
+    # know that `Store` didn't call create
+    os.mkdir(store.directory)
+    # Call require_created, this should not call create
+    store.require_created()
+    assert not os.path.exists(os.path.join(store.directory, 'README'))
+
+
+@pytest.yield_fixture
+def log_info_mock():
+    with mock.patch.object(logger, 'info', autospec=True) as info_mock:
+        yield info_mock
+
+
+def test_clone(store, empty_git_dir, log_info_mock):
+    with local.cwd(empty_git_dir):
+        local['git']('commit', '--allow-empty', '-m', 'foo')
+        sha = get_head_sha(empty_git_dir)
+        local['git']('commit', '--allow-empty', '-m', 'bar')
+
+    ret = store.clone(empty_git_dir, sha)
+    # Should have printed some stuff
+    log_info_mock.assert_called_with('This may take a few minutes...')
+
+    # Should return a directory inside of the store
+    assert os.path.exists(ret)
+    assert ret.startswith(store.directory)
+    # Directory should start with `repo`
+    _, dirname = os.path.split(ret)
+    assert dirname.startswith('repo')
+    # Should be checked out to the sha we specified
+    assert get_head_sha(ret) == sha
+
+    # Assert that we made a symlink from the sha to the repo
+    sha_path = os.path.join(store.directory, sha)
+    assert os.path.exists(sha_path)
+    assert os.path.islink(sha_path)
+    assert os.readlink(sha_path) == ret
+
+
+def test_clone_cleans_up_on_checkout_failure(store):
+    try:
+        # This raises an exception because you can't clone something that
+        # doesn't exist!
+        store.clone('/i_dont_exist_lol', 'fake_sha')
+    except Exception as e:
+        assert '/i_dont_exist_lol' in five.text(e)
+
+    things_starting_with_repo = [
+        thing for thing in os.listdir(store.directory)
+        if thing.startswith('repo')
+    ]
+    assert things_starting_with_repo == []
+
+
+def test_has_cmd_runner_at_directory(store):
+    ret = store.cmd_runner
+    assert ret.prefix_dir == store.directory + os.sep
+
+
+def test_clone_when_repo_already_exists(store):
+    # Create a symlink and directory in the store simulating an already
+    # created repository.
+    store.require_created()
+    repo_dir_path = os.path.join(store.directory, 'repo_dir')
+    os.mkdir(repo_dir_path)
+    os.symlink(repo_dir_path, os.path.join(store.directory, 'fake_sha'))
+
+    ret = store.clone('url', 'fake_sha')
+    assert ret == repo_dir_path