Mirror of https://github.com/pre-commit/pre-commit.git (synced 2026-02-17 08:14:42 +04:00)
Merge pull request #711 from pre-commit/different_additional
Each set of additional dependencies gets its own env
Commit 082c950d8f: 5 changed files with 62 additions and 112 deletions
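The idea of the change, shown as a minimal standalone sketch (the env_key helper and the example ref below are illustrative assumptions, not pre-commit code): a cached clone/environment is looked up by repository, ref, and the sorted additional_dependencies, so two hooks from the same repo with different extra dependencies no longer share an environment.

# Minimal sketch of the keying idea behind this change (illustrative only).
def env_key(repo, ref, deps=()):
    # mirrors Store._new_repo in the diff below: deps are folded into the repo key
    if deps:
        repo = '{}:{}'.format(repo, ','.join(sorted(deps)))
    return (repo, ref)


# same repo + ref, different additional_dependencies -> different cache keys
assert env_key('https://github.com/pre-commit/pre-commit-hooks', 'v1.1.1') != \
    env_key('https://github.com/pre-commit/pre-commit-hooks', 'v1.1.1', ('mccabe',))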
@@ -33,9 +33,9 @@ def _update_repo(repo_config, runner, tags_only):
     Args:
         repo_config - A config for a repository
     """
-    repo = Repository.create(repo_config, runner.store)
+    repo_path = runner.store.clone(repo_config['repo'], repo_config['sha'])

-    with cwd(repo._repo_path):
+    with cwd(repo_path):
         cmd_output('git', 'fetch')
         tag_cmd = ('git', 'describe', 'origin/master', '--tags')
         if tags_only:
@@ -57,7 +57,7 @@ def _update_repo(repo_config, runner, tags_only):
     new_repo = Repository.create(new_config, runner.store)

     # See if any of our hooks were deleted with the new commits
-    hooks = {hook['id'] for hook in repo.repo_config['hooks']}
+    hooks = {hook['id'] for hook in repo_config['hooks']}
     hooks_missing = hooks - (hooks & set(new_repo.manifest_hooks))
     if hooks_missing:
         raise RepositoryCannotBeUpdatedError(
@@ -7,7 +7,6 @@ import os
 import pipes
 import shutil
 import sys
-from collections import defaultdict

 import pkg_resources
 from cached_property import cached_property
@@ -149,22 +148,11 @@ class Repository(object):
         else:
             return cls(config, store)

-    @cached_property
-    def _repo_path(self):
-        return self.store.clone(
-            self.repo_config['repo'], self.repo_config['sha'],
-        )
-
-    @cached_property
-    def _prefix(self):
-        return Prefix(self._repo_path)
-
-    def _prefix_from_deps(self, language_name, deps):
-        return self._prefix
-
     @cached_property
     def manifest_hooks(self):
-        manifest_path = os.path.join(self._repo_path, C.MANIFEST_FILE)
+        repo, sha = self.repo_config['repo'], self.repo_config['sha']
+        repo_path = self.store.clone(repo, sha)
+        manifest_path = os.path.join(repo_path, C.MANIFEST_FILE)
         return {hook['id']: hook for hook in load_manifest(manifest_path)}

     @cached_property
@@ -185,21 +173,25 @@ class Repository(object):
             for hook in self.repo_config['hooks']
         )

-    @cached_property
+    def _prefix_from_deps(self, language_name, deps):
+        repo, sha = self.repo_config['repo'], self.repo_config['sha']
+        return Prefix(self.store.clone(repo, sha, deps))
+
     def _venvs(self):
-        deps_dict = defaultdict(_UniqueList)
-        for _, hook in self.hooks:
-            deps_dict[(hook['language'], hook['language_version'])].update(
-                hook['additional_dependencies'],
-            )
         ret = []
-        for (language, version), deps in deps_dict.items():
-            ret.append((self._prefix, language, version, deps))
+        for _, hook in self.hooks:
+            language = hook['language']
+            version = hook['language_version']
+            deps = hook['additional_dependencies']
+            ret.append((
+                self._prefix_from_deps(language, deps),
+                language, version, deps,
+            ))
         return tuple(ret)

     def require_installed(self):
         if not self.__installed:
-            _install_all(self._venvs, self.repo_config['repo'], self.store)
+            _install_all(self._venvs(), self.repo_config['repo'], self.store)
             self.__installed = True

     def run_hook(self, hook, file_args):
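To make the hunk above concrete, here is a rough standalone sketch of the new per-hook venv listing (list_venvs and the fake prefix_from_deps callable are hypothetical, not pre-commit API): every hook contributes its own (prefix, language, version, deps) tuple, with the prefix derived from that hook's additional_dependencies instead of one shared _prefix.

# Hypothetical sketch of the reworked _venvs() behaviour (not pre-commit code).
def list_venvs(hooks, prefix_from_deps):
    # hooks: dicts shaped like pre-commit hook configs
    ret = []
    for hook in hooks:
        language = hook['language']
        version = hook['language_version']
        deps = hook['additional_dependencies']
        # each hook gets a prefix keyed by its own additional_dependencies
        ret.append((prefix_from_deps(language, deps), language, version, deps))
    return tuple(ret)


hooks = [
    {'language': 'python', 'language_version': 'default', 'additional_dependencies': []},
    {'language': 'python', 'language_version': 'default', 'additional_dependencies': ['mccabe']},
]
venvs = list_venvs(hooks, lambda lang, deps: (lang, tuple(sorted(deps))))
assert venvs[0][0] != venvs[1][0]  # different deps -> different prefix/env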
@@ -237,19 +229,6 @@ class LocalRepository(Repository):
             for hook in self.repo_config['hooks']
         )

-    @cached_property
-    def _venvs(self):
-        ret = []
-        for _, hook in self.hooks:
-            language = hook['language']
-            version = hook['language_version']
-            deps = hook['additional_dependencies']
-            ret.append((
-                self._prefix_from_deps(language, deps),
-                language, version, deps,
-            ))
-        return tuple(ret)
-

 class MetaRepository(LocalRepository):
     @cached_property
@@ -303,14 +282,3 @@ class MetaRepository(LocalRepository):
             (hook['id'], _hook(self.manifest_hooks[hook['id']], hook))
             for hook in self.repo_config['hooks']
         )
-
-
-class _UniqueList(list):
-    def __init__(self):
-        self._set = set()
-
-    def update(self, obj):
-        for item in obj:
-            if item not in self._set:
-                self._set.add(item)
-                self.append(item)
@@ -72,9 +72,9 @@ class Store(object):
         with contextlib.closing(sqlite3.connect(tmpfile)) as db:
             db.executescript(
                 'CREATE TABLE repos ('
-                ' repo CHAR(255) NOT NULL,'
-                ' ref CHAR(255) NOT NULL,'
-                ' path CHAR(255) NOT NULL,'
+                ' repo TEXT NOT NULL,'
+                ' ref TEXT NOT NULL,'
+                ' path TEXT NOT NULL,'
                 ' PRIMARY KEY (repo, ref)'
                 ');',
             )
@@ -101,15 +101,17 @@ class Store(object):
             self._create()
             self.__created = True

-    def _new_repo(self, repo, ref, make_strategy):
+    def _new_repo(self, repo, ref, deps, make_strategy):
         self.require_created()
+        if deps:
+            repo = '{}:{}'.format(repo, ','.join(sorted(deps)))

         def _get_result():
             # Check if we already exist
             with sqlite3.connect(self.db_path) as db:
                 result = db.execute(
                     'SELECT path FROM repos WHERE repo = ? AND ref = ?',
-                    [repo, ref],
+                    (repo, ref),
                 ).fetchone()
             if result:
                 return result[0]
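A rough standalone sketch of the lookup-or-create pattern _new_repo follows once additional_dependencies are folded into the key; the in-memory database, simplified schema, clone_path helper, and fake paths are assumptions for illustration, not the project's code.

# Illustrative sketch only: same keying and SELECT-then-insert flow as above.
import sqlite3

db = sqlite3.connect(':memory:')
db.executescript(
    'CREATE TABLE repos ('
    ' repo TEXT NOT NULL, ref TEXT NOT NULL, path TEXT NOT NULL,'
    ' PRIMARY KEY (repo, ref));',
)


def clone_path(repo, ref, deps=()):
    if deps:
        # fold deps into the repo key, as in the diff above
        repo = '{}:{}'.format(repo, ','.join(sorted(deps)))
    row = db.execute(
        'SELECT path FROM repos WHERE repo = ? AND ref = ?', (repo, ref),
    ).fetchone()
    if row:
        return row[0]  # cache hit: the existing clone/env is reused
    path = '/fake/store/{}@{}'.format(repo, ref)  # stand-in for a real clone dir
    with db:
        db.execute('INSERT INTO repos VALUES (?, ?, ?)', (repo, ref, path))
    return path


# same repo + ref with different deps -> separate entries; same deps -> reused
assert clone_path('r', 'v1') != clone_path('r', 'v1', ('pep8',))
assert clone_path('r', 'v1', ('pep8',)) == clone_path('r', 'v1', ('pep8',))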
@@ -137,7 +139,7 @@ class Store(object):
             )
             return directory

-    def clone(self, repo, ref):
+    def clone(self, repo, ref, deps=()):
         """Clone the given url and checkout the specific ref."""
         def clone_strategy(directory):
             cmd_output(
@@ -151,7 +153,7 @@ class Store(object):
                 env=no_git_env(),
             )

-        return self._new_repo(repo, ref, clone_strategy)
+        return self._new_repo(repo, ref, deps, clone_strategy)

     def make_local(self, deps):
         def make_local_strategy(directory):
@@ -172,8 +174,7 @@ class Store(object):
             _git_cmd('commit', '--no-edit', '--no-gpg-sign', '-n', '-minit')

         return self._new_repo(
-            'local:{}'.format(','.join(sorted(deps))), C.LOCAL_REPO_VERSION,
-            make_local_strategy,
+            'local', C.LOCAL_REPO_VERSION, deps, make_local_strategy,
         )

     @cached_property
@@ -165,12 +165,6 @@ def log_info_mock():
         yield mck


-@pytest.fixture
-def log_warning_mock():
-    with mock.patch.object(logging.getLogger('pre_commit'), 'warning') as mck:
-        yield mck
-
-
 class FakeStream(object):
     def __init__(self):
         self.data = io.BytesIO()
@@ -433,7 +433,7 @@ def test_venvs(tempdir_factory, store):
     path = make_repo(tempdir_factory, 'python_hooks_repo')
     config = make_config_from_repo(path)
     repo = Repository.create(config, store)
-    venv, = repo._venvs
+    venv, = repo._venvs()
     assert venv == (mock.ANY, 'python', python.get_default_version(), [])

@@ -443,50 +443,33 @@ def test_additional_dependencies(tempdir_factory, store):
     config = make_config_from_repo(path)
     config['hooks'][0]['additional_dependencies'] = ['pep8']
     repo = Repository.create(config, store)
-    venv, = repo._venvs
+    venv, = repo._venvs()
     assert venv == (mock.ANY, 'python', python.get_default_version(), ['pep8'])


-@pytest.mark.integration
-def test_additional_dependencies_duplicated(
-        tempdir_factory, store, log_warning_mock,
-):
-    path = make_repo(tempdir_factory, 'ruby_hooks_repo')
-    config = make_config_from_repo(path)
-    deps = ['thread_safe', 'tins', 'thread_safe']
-    config['hooks'][0]['additional_dependencies'] = deps
-    repo = Repository.create(config, store)
-    venv, = repo._venvs
-    assert venv == (mock.ANY, 'ruby', 'default', ['thread_safe', 'tins'])
-
-
-@pytest.mark.integration
-def test_additional_python_dependencies_installed(tempdir_factory, store):
-    path = make_repo(tempdir_factory, 'python_hooks_repo')
-    config = make_config_from_repo(path)
-    config['hooks'][0]['additional_dependencies'] = ['mccabe']
-    repo = Repository.create(config, store)
-    repo.require_installed()
-    with python.in_env(repo._prefix, 'default'):
-        output = cmd_output('pip', 'freeze', '-l')[1]
-        assert 'mccabe' in output
-
-
 @pytest.mark.integration
 def test_additional_dependencies_roll_forward(tempdir_factory, store):
     path = make_repo(tempdir_factory, 'python_hooks_repo')
-    config = make_config_from_repo(path)
-    # Run the repo once without additional_dependencies
-    repo = Repository.create(config, store)
-    repo.require_installed()
-    # Now run it with additional_dependencies
-    config['hooks'][0]['additional_dependencies'] = ['mccabe']
-    repo = Repository.create(config, store)
-    repo.require_installed()
-    # We should see our additional dependency installed
-    with python.in_env(repo._prefix, 'default'):
-        output = cmd_output('pip', 'freeze', '-l')[1]
-        assert 'mccabe' in output
+
+    config1 = make_config_from_repo(path)
+    repo1 = Repository.create(config1, store)
+    repo1.require_installed()
+    (prefix1, _, version1, _), = repo1._venvs()
+    with python.in_env(prefix1, version1):
+        assert 'mccabe' not in cmd_output('pip', 'freeze', '-l')[1]
+
+    # Make another repo with additional dependencies
+    config2 = make_config_from_repo(path)
+    config2['hooks'][0]['additional_dependencies'] = ['mccabe']
+    repo2 = Repository.create(config2, store)
+    repo2.require_installed()
+    (prefix2, _, version2, _), = repo2._venvs()
+    with python.in_env(prefix2, version2):
+        assert 'mccabe' in cmd_output('pip', 'freeze', '-l')[1]
+
+    # should not have affected original
+    with python.in_env(prefix1, version1):
+        assert 'mccabe' not in cmd_output('pip', 'freeze', '-l')[1]


 @xfailif_windows_no_ruby
@@ -499,7 +482,8 @@ def test_additional_ruby_dependencies_installed(
     config['hooks'][0]['additional_dependencies'] = ['thread_safe', 'tins']
     repo = Repository.create(config, store)
     repo.require_installed()
-    with ruby.in_env(repo._prefix, 'default'):
+    (prefix, _, version, _), = repo._venvs()
+    with ruby.in_env(prefix, version):
         output = cmd_output('gem', 'list', '--local')[1]
         assert 'thread_safe' in output
         assert 'tins' in output
@@ -516,7 +500,8 @@ def test_additional_node_dependencies_installed(
     config['hooks'][0]['additional_dependencies'] = ['lodash']
     repo = Repository.create(config, store)
     repo.require_installed()
-    with node.in_env(repo._prefix, 'default'):
+    (prefix, _, version, _), = repo._venvs()
+    with node.in_env(prefix, version):
         output = cmd_output('npm', 'ls', '-g')[1]
         assert 'lodash' in output

@@ -532,7 +517,8 @@ def test_additional_golang_dependencies_installed(
     config['hooks'][0]['additional_dependencies'] = deps
     repo = Repository.create(config, store)
     repo.require_installed()
-    binaries = os.listdir(repo._prefix.path(
+    (prefix, _, _, _), = repo._venvs()
+    binaries = os.listdir(prefix.path(
         helpers.environment_dir(golang.ENVIRONMENT_DIR, 'default'), 'bin',
     ))
     # normalize for windows
@@ -598,8 +584,9 @@ def test_control_c_control_c_on_install(tempdir_factory, store):
         repo.run_hook(hook, [])

     # Should have made an environment, however this environment is broken!
-    envdir = 'py_env-{}'.format(python.get_default_version())
-    assert repo._prefix.exists(envdir)
+    (prefix, _, version, _), = repo._venvs()
+    envdir = 'py_env-{}'.format(version)
+    assert prefix.exists(envdir)

     # However, it should be perfectly runnable (reinstall after botched
     # install)
@@ -616,8 +603,8 @@ def test_invalidated_virtualenv(tempdir_factory, store):

     # Simulate breaking of the virtualenv
     repo.require_installed()
-    version = python.get_default_version()
-    libdir = repo._prefix.path('py_env-{}'.format(version), 'lib', version)
+    (prefix, _, version, _), = repo._venvs()
+    libdir = prefix.path('py_env-{}'.format(version), 'lib', version)
     paths = [
         os.path.join(libdir, p) for p in ('site.py', 'site.pyc', '__pycache__')
     ]