Mirror of https://github.com/pre-commit/pre-commit.git (synced 2026-02-17 08:14:42 +04:00)

Commit cb8dd335f4: Replace jsonschema with better error messages
Parent: 71bbdb5b4c

30 changed files with 1064 additions and 842 deletions
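
The diff replaces the jsonschema-based validation (and its tests) with a small
in-repo validation layer, pre_commit.schema, whose errors carry nested context.
As a minimal sketch of the usage pattern implied by the new tests below (the
variable name hook_schema is illustrative; Map, Required, check_bool, validate,
ValidationError, and the rendered output are taken from the assertions in
tests/schema_test.py, while pre_commit/schema.py itself is not shown in this
view):

    from pre_commit.schema import Map
    from pre_commit.schema import Required
    from pre_commit.schema import ValidationError
    from pre_commit.schema import check_bool
    from pre_commit.schema import validate

    # 'foo' names the map in error messages; 'key' is the field echoed back
    # in the context line, e.g. "At foo(key=5)".
    hook_schema = Map('foo', 'key', Required('key', check_bool))

    try:
        validate({'key': 5}, hook_schema)
    except ValidationError as e:
        print(e)
        # Per the assertions below, this renders a nested trace such as:
        #   ==> At foo(key=5)
        #   ==> At key: key
        #   =====> Expected bool got int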

@@ -1,74 +0,0 @@
from __future__ import unicode_literals

from collections import OrderedDict

import pytest
from aspy.yaml import ordered_load

from pre_commit.clientlib.validate_base import get_validator
from testing.util import get_resource_path


class AdditionalValidatorError(ValueError):
    pass


@pytest.fixture
def noop_validator():
    return get_validator({}, ValueError)


@pytest.fixture
def array_validator():
    return get_validator({'type': 'array'}, ValueError)


@pytest.fixture
def additional_validator():
    def raises_always(_):
        raise AdditionalValidatorError

    return get_validator(
        {},
        ValueError,
        additional_validation_strategy=raises_always,
    )


def test_raises_for_non_existent_file(noop_validator):
    with pytest.raises(ValueError):
        noop_validator('file_that_does_not_exist.yaml')


def test_raises_for_invalid_yaml_file(noop_validator):
    with pytest.raises(ValueError):
        noop_validator(get_resource_path('non_parseable_yaml_file.notyaml'))


def test_raises_for_failing_schema(array_validator):
    with pytest.raises(ValueError):
        array_validator(
            get_resource_path('valid_yaml_but_invalid_manifest.yaml')
        )


def test_passes_array_schema(array_validator):
    array_validator(get_resource_path('array_yaml_file.yaml'))


def test_raises_when_additional_validation_fails(additional_validator):
    with pytest.raises(AdditionalValidatorError):
        additional_validator(get_resource_path('array_yaml_file.yaml'))


def test_returns_object_after_validating(noop_validator):
    ret = noop_validator(get_resource_path('array_yaml_file.yaml'))
    assert ret == ['foo', 'bar']


def test_load_strategy(noop_validator):
    ret = noop_validator(
        get_resource_path('ordering_data_test.yaml'),
        load_strategy=ordered_load,
    )
    assert type(ret) is OrderedDict

@@ -1,195 +0,0 @@
from __future__ import unicode_literals

import jsonschema
import pytest

from pre_commit.clientlib.validate_config import CONFIG_JSON_SCHEMA
from pre_commit.clientlib.validate_config import InvalidConfigError
from pre_commit.clientlib.validate_config import run
from pre_commit.clientlib.validate_config import validate_config_extra
from pre_commit.jsonschema_extensions import apply_defaults
from testing.util import get_resource_path
from testing.util import is_valid_according_to_schema


@pytest.mark.parametrize(
    ('input', 'expected_output'),
    (
        (['.pre-commit-config.yaml'], 0),
        (['non_existent_file.yaml'], 1),
        ([get_resource_path('valid_yaml_but_invalid_config.yaml')], 1),
        ([get_resource_path('non_parseable_yaml_file.notyaml')], 1),
    ),
)
def test_run(input, expected_output):
    assert run(input) == expected_output


@pytest.mark.parametrize(('config_obj', 'expected'), (
    ([], False),
    (
        [{
            'repo': 'git@github.com:pre-commit/pre-commit-hooks',
            'sha': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
            'hooks': [{'id': 'pyflakes', 'files': '\\.py$'}],
        }],
        True,
    ),
    (
        [{
            'repo': 'git@github.com:pre-commit/pre-commit-hooks',
            'sha': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
            'hooks': [
                {
                    'id': 'pyflakes',
                    'files': '\\.py$',
                    'args': ['foo', 'bar', 'baz'],
                },
            ],
        }],
        True,
    ),
    (
        [{
            'repo': 'git@github.com:pre-commit/pre-commit-hooks',
            'sha': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
            'hooks': [
                {
                    'id': 'pyflakes',
                    'files': '\\.py$',
                    # Exclude pattern must be a string
                    'exclude': 0,
                    'args': ['foo', 'bar', 'baz'],
                },
            ],
        }],
        False,
    ),
))
def test_is_valid_according_to_schema(config_obj, expected):
    ret = is_valid_according_to_schema(config_obj, CONFIG_JSON_SCHEMA)
    assert ret is expected


def test_config_with_failing_regexes_fails():
    with pytest.raises(InvalidConfigError):
        # Note the regex '(' is invalid (unbalanced parens)
        config = apply_defaults(
            [{
                'repo': 'foo',
                'sha': 'foo',
                'hooks': [{'id': 'hook_id', 'files': '('}],
            }],
            CONFIG_JSON_SCHEMA,
        )
        validate_config_extra(config)


def test_config_with_ok_regexes_passes():
    config = apply_defaults(
        [{
            'repo': 'foo',
            'sha': 'foo',
            'hooks': [{'id': 'hook_id', 'files': '\\.py$'}],
        }],
        CONFIG_JSON_SCHEMA,
    )
    validate_config_extra(config)


def test_config_with_invalid_exclude_regex_fails():
    with pytest.raises(InvalidConfigError):
        # Note the regex '(' is invalid (unbalanced parens)
        config = apply_defaults(
            [{
                'repo': 'foo',
                'sha': 'foo',
                'hooks': [{'id': 'hook_id', 'files': '', 'exclude': '('}],
            }],
            CONFIG_JSON_SCHEMA,
        )
        validate_config_extra(config)


def test_config_with_ok_exclude_regex_passes():
    config = apply_defaults(
        [{
            'repo': 'foo',
            'sha': 'foo',
            'hooks': [{'id': 'hook_id', 'files': '', 'exclude': '^vendor/'}],
        }],
        CONFIG_JSON_SCHEMA,
    )
    validate_config_extra(config)


@pytest.mark.parametrize('config_obj', (
    [{
        'repo': 'local',
        'sha': 'foo',
        'hooks': [{
            'id': 'do_not_commit',
            'name': 'Block if "DO NOT COMMIT" is found',
            'entry': 'DO NOT COMMIT',
            'language': 'pcre',
            'files': '^(.*)$',
        }],
    }],
))
def test_config_with_local_hooks_definition_fails(config_obj):
    with pytest.raises((
        jsonschema.exceptions.ValidationError, InvalidConfigError
    )):
        jsonschema.validate(config_obj, CONFIG_JSON_SCHEMA)
        config = apply_defaults(config_obj, CONFIG_JSON_SCHEMA)
        validate_config_extra(config)


@pytest.mark.parametrize('config_obj', (
    [{
        'repo': 'local',
        'hooks': [{
            'id': 'arg-per-line',
            'name': 'Args per line hook',
            'entry': 'bin/hook.sh',
            'language': 'script',
            'files': '',
            'args': ['hello', 'world'],
        }],
    }],
    [{
        'repo': 'local',
        'hooks': [{
            'id': 'arg-per-line',
            'name': 'Args per line hook',
            'entry': 'bin/hook.sh',
            'language': 'script',
            'files': '',
            'args': ['hello', 'world'],
        }]
    }],
))
def test_config_with_local_hooks_definition_passes(config_obj):
    jsonschema.validate(config_obj, CONFIG_JSON_SCHEMA)
    config = apply_defaults(config_obj, CONFIG_JSON_SCHEMA)
    validate_config_extra(config)


def test_does_not_contain_defaults():
    """Due to the way our merging works, if this schema has any defaults they
    will clobber potentially useful values in the backing manifest. #227
    """
    to_process = [(CONFIG_JSON_SCHEMA, ())]
    while to_process:
        schema, route = to_process.pop()
        # Check this value
        if isinstance(schema, dict):
            if 'default' in schema:
                raise AssertionError(
                    'Unexpected default in schema at {}'.format(
                        ' => '.join(route),
                    )
                )

            for key, value in schema.items():
                to_process.append((value, route + (key,)))

@@ -1,87 +0,0 @@
from __future__ import unicode_literals

import pytest

from pre_commit.clientlib.validate_manifest import additional_manifest_check
from pre_commit.clientlib.validate_manifest import InvalidManifestError
from pre_commit.clientlib.validate_manifest import MANIFEST_JSON_SCHEMA
from pre_commit.clientlib.validate_manifest import run
from testing.util import get_resource_path
from testing.util import is_valid_according_to_schema


@pytest.mark.parametrize(
    ('input', 'expected_output'),
    (
        (['.pre-commit-hooks.yaml'], 0),
        (['non_existent_file.yaml'], 1),
        ([get_resource_path('valid_yaml_but_invalid_manifest.yaml')], 1),
        ([get_resource_path('non_parseable_yaml_file.notyaml')], 1),
    ),
)
def test_run(input, expected_output):
    assert run(input) == expected_output


def test_additional_manifest_check_raises_for_bad_language():
    with pytest.raises(InvalidManifestError):
        additional_manifest_check([{'id': 'foo', 'language': 'not valid'}])


@pytest.mark.parametrize(
    'obj',
    (
        [{'language': 'python', 'files': ''}],
        [{'language': 'ruby', 'files': ''}]
    ),
)
def test_additional_manifest_check_passing(obj):
    additional_manifest_check(obj)


@pytest.mark.parametrize(
    'obj',
    (
        [{'id': 'a', 'language': 'not a language', 'files': ''}],
        [{'id': 'a', 'language': 'python3', 'files': ''}],
        [{'id': 'a', 'language': 'python', 'files': 'invalid regex('}],
        [{'id': 'a', 'language': 'not a language', 'files': ''}],
        [{'id': 'a', 'language': 'python3', 'files': ''}],
        [{'id': 'a', 'language': 'python', 'files': '', 'exclude': '('}],
    ),
)
def test_additional_manifest_failing(obj):
    with pytest.raises(InvalidManifestError):
        additional_manifest_check(obj)


@pytest.mark.parametrize(
    ('manifest_obj', 'expected'),
    (
        ([], False),
        (
            [{
                'id': 'a',
                'name': 'b',
                'entry': 'c',
                'language': 'python',
                'files': r'\.py$'
            }],
            True,
        ),
        (
            [{
                'id': 'a',
                'name': 'b',
                'entry': 'c',
                'language': 'python',
                'language_version': 'python3.4',
                'files': r'\.py$',
            }],
            True,
        ),
    )
)
def test_is_valid_according_to_schema(manifest_obj, expected):
    ret = is_valid_according_to_schema(manifest_obj, MANIFEST_JSON_SCHEMA)
    assert ret is expected

tests/clientlib_test.py (new file, 194 lines)
@@ -0,0 +1,194 @@
from __future__ import unicode_literals

import pytest

from pre_commit import schema
from pre_commit.clientlib import check_language
from pre_commit.clientlib import CONFIG_HOOK_DICT
from pre_commit.clientlib import CONFIG_SCHEMA
from pre_commit.clientlib import is_local_repo
from pre_commit.clientlib import MANIFEST_SCHEMA
from pre_commit.clientlib import validate_config_main
from pre_commit.clientlib import validate_manifest_main
from testing.util import get_resource_path


def is_valid_according_to_schema(obj, obj_schema):
    try:
        schema.validate(obj, obj_schema)
        return True
    except schema.ValidationError:
        return False


@pytest.mark.parametrize('value', ('not a language', 'python3'))
def test_check_language_failures(value):
    with pytest.raises(schema.ValidationError):
        check_language(value)


@pytest.mark.parametrize('value', ('python', 'node', 'pcre'))
def test_check_language_ok(value):
    check_language(value)


def test_is_local_repo():
    assert is_local_repo({'repo': 'local'})


@pytest.mark.parametrize(
    ('args', 'expected_output'),
    (
        (['.pre-commit-config.yaml'], 0),
        (['non_existent_file.yaml'], 1),
        ([get_resource_path('valid_yaml_but_invalid_config.yaml')], 1),
        ([get_resource_path('non_parseable_yaml_file.notyaml')], 1),
    ),
)
def test_validate_config_main(args, expected_output):
    assert validate_config_main(args) == expected_output


@pytest.mark.parametrize(('config_obj', 'expected'), (
    ([], False),
    (
        [{
            'repo': 'git@github.com:pre-commit/pre-commit-hooks',
            'sha': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
            'hooks': [{'id': 'pyflakes', 'files': '\\.py$'}],
        }],
        True,
    ),
    (
        [{
            'repo': 'git@github.com:pre-commit/pre-commit-hooks',
            'sha': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
            'hooks': [
                {
                    'id': 'pyflakes',
                    'files': '\\.py$',
                    'args': ['foo', 'bar', 'baz'],
                },
            ],
        }],
        True,
    ),
    (
        [{
            'repo': 'git@github.com:pre-commit/pre-commit-hooks',
            'sha': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
            'hooks': [
                {
                    'id': 'pyflakes',
                    'files': '\\.py$',
                    # Exclude pattern must be a string
                    'exclude': 0,
                    'args': ['foo', 'bar', 'baz'],
                },
            ],
        }],
        False,
    ),
))
def test_config_valid(config_obj, expected):
    ret = is_valid_according_to_schema(config_obj, CONFIG_SCHEMA)
    assert ret is expected


@pytest.mark.parametrize('config_obj', (
    [{
        'repo': 'local',
        'sha': 'foo',
        'hooks': [{
            'id': 'do_not_commit',
            'name': 'Block if "DO NOT COMMIT" is found',
            'entry': 'DO NOT COMMIT',
            'language': 'pcre',
            'files': '^(.*)$',
        }],
    }],
))
def test_config_with_local_hooks_definition_fails(config_obj):
    with pytest.raises(schema.ValidationError):
        schema.validate(config_obj, CONFIG_SCHEMA)


@pytest.mark.parametrize('config_obj', (
    [{
        'repo': 'local',
        'hooks': [{
            'id': 'arg-per-line',
            'name': 'Args per line hook',
            'entry': 'bin/hook.sh',
            'language': 'script',
            'files': '',
            'args': ['hello', 'world'],
        }],
    }],
    [{
        'repo': 'local',
        'hooks': [{
            'id': 'arg-per-line',
            'name': 'Args per line hook',
            'entry': 'bin/hook.sh',
            'language': 'script',
            'files': '',
            'args': ['hello', 'world'],
        }]
    }],
))
def test_config_with_local_hooks_definition_passes(config_obj):
    schema.validate(config_obj, CONFIG_SCHEMA)


def test_config_schema_does_not_contain_defaults():
    """Due to the way our merging works, if this schema has any defaults they
    will clobber potentially useful values in the backing manifest. #227
    """
    for item in CONFIG_HOOK_DICT.items:
        assert not isinstance(item, schema.Optional)


@pytest.mark.parametrize(
    ('args', 'expected_output'),
    (
        (['.pre-commit-hooks.yaml'], 0),
        (['non_existent_file.yaml'], 1),
        ([get_resource_path('valid_yaml_but_invalid_manifest.yaml')], 1),
        ([get_resource_path('non_parseable_yaml_file.notyaml')], 1),
    ),
)
def test_validate_manifest_main(args, expected_output):
    assert validate_manifest_main(args) == expected_output


@pytest.mark.parametrize(
    ('manifest_obj', 'expected'),
    (
        ([], False),
        (
            [{
                'id': 'a',
                'name': 'b',
                'entry': 'c',
                'language': 'python',
                'files': r'\.py$'
            }],
            True,
        ),
        (
            [{
                'id': 'a',
                'name': 'b',
                'entry': 'c',
                'language': 'python',
                'language_version': 'python3.4',
                'files': r'\.py$',
            }],
            True,
        ),
    )
)
def test_valid_manifests(manifest_obj, expected):
    ret = is_valid_according_to_schema(manifest_obj, MANIFEST_SCHEMA)
    assert ret is expected
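
The docstring of test_config_schema_does_not_contain_defaults above (issue #227)
points at a merge hazard: config-side defaults would overwrite values the hook's
manifest already provides. A hypothetical illustration of that failure mode (the
plain dict merge and the '--strict' value are illustrative only, not the actual
merging code in pre-commit):

    manifest_hook = {'id': 'pyflakes', 'args': ['--strict']}
    config_hook = {'id': 'pyflakes', 'args': []}  # schema default, not user-written
    merged = dict(manifest_hook, **config_hook)
    assert merged == {'id': 'pyflakes', 'args': []}  # manifest value clobbered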

@@ -6,7 +6,7 @@ from collections import OrderedDict
 import pytest

 import pre_commit.constants as C
-from pre_commit.clientlib.validate_config import load_config
+from pre_commit.clientlib import load_config
 from pre_commit.commands.autoupdate import _update_repo
 from pre_commit.commands.autoupdate import autoupdate
 from pre_commit.commands.autoupdate import RepositoryCannotBeUpdatedError

@@ -1,91 +0,0 @@
from __future__ import unicode_literals

import jsonschema.exceptions
import pytest

from pre_commit.jsonschema_extensions import apply_defaults
from pre_commit.jsonschema_extensions import remove_defaults


def test_apply_defaults_copies_object():
    input = {}
    ret = apply_defaults(input, {})
    assert ret is not input


def test_apply_default_does_not_touch_schema_without_defaults():
    ret = apply_defaults(
        {'foo': 'bar'},
        {'type': 'object', 'properties': {'foo': {}, 'baz': {}}},
    )
    assert ret == {'foo': 'bar'}


def test_apply_defaults_applies_defaults():
    ret = apply_defaults(
        {'foo': 'bar'},
        {
            'type': 'object',
            'properties': {
                'foo': {'default': 'biz'},
                'baz': {'default': 'herp'},
            }
        }
    )
    assert ret == {'foo': 'bar', 'baz': 'herp'}


def test_apply_defaults_deep():
    ret = apply_defaults(
        {'foo': {'bar': {}}},
        {
            'type': 'object',
            'properties': {
                'foo': {
                    'type': 'object',
                    'properties': {
                        'bar': {
                            'type': 'object',
                            'properties': {'baz': {'default': 'herp'}},
                        },
                    },
                },
            },
        },
    )
    assert ret == {'foo': {'bar': {'baz': 'herp'}}}


def test_apply_defaults_copies():
    schema = {'properties': {'foo': {'default': []}}}
    ret1 = apply_defaults({}, schema)
    ret2 = apply_defaults({}, schema)
    assert ret1['foo'] is not ret2['foo']


def test_remove_defaults_copies_object():
    input = {}
    ret = remove_defaults(input, {})
    assert ret is not input


def test_remove_defaults_does_not_remove_non_default():
    ret = remove_defaults(
        {'foo': 'bar'},
        {'properties': {'foo': {'default': 'baz'}}},
    )
    assert ret == {'foo': 'bar'}


def test_remove_defaults_removes_default():
    ret = remove_defaults(
        {'foo': 'bar'},
        {'properties': {'foo': {'default': 'bar'}}},
    )
    assert ret == {}


@pytest.mark.parametrize('func', (apply_defaults, remove_defaults))
def test_still_validates_schema(func):
    with pytest.raises(jsonschema.exceptions.ValidationError):
        func({}, {'properties': {'foo': {}}, 'required': ['foo']})

@@ -20,6 +20,7 @@ def test_manifest_contents(manifest):
    # Should just retrieve the manifest contents
    assert manifest.manifest_contents == [{
        'always_run': False,
        'additional_dependencies': [],
        'args': [],
        'description': '',
        'entry': 'bin/hook.sh',
@@ -28,7 +29,7 @@ def test_manifest_contents(manifest):
        'id': 'bash_hook',
        'language': 'script',
        'language_version': 'default',
-       'minimum_pre_commit_version': '0.0.0',
+       'minimum_pre_commit_version': '0',
        'name': 'Bash hook',
        'stages': [],
    }]
@@ -37,6 +38,7 @@ def test_manifest_contents(manifest):
def test_hooks(manifest):
    assert manifest.hooks['bash_hook'] == {
        'always_run': False,
        'additional_dependencies': [],
        'args': [],
        'description': '',
        'entry': 'bin/hook.sh',
@@ -45,7 +47,7 @@ def test_hooks(manifest):
        'id': 'bash_hook',
        'language': 'script',
        'language_version': 'default',
-       'minimum_pre_commit_version': '0.0.0',
+       'minimum_pre_commit_version': '0',
        'name': 'Bash hook',
        'stages': [],
    }

@@ -13,7 +13,7 @@ import pytest
 import pre_commit.constants as C
 from pre_commit import five
 from pre_commit import parse_shebang
-from pre_commit.clientlib.validate_manifest import load_manifest
+from pre_commit.clientlib import load_manifest
 from pre_commit.languages import golang
 from pre_commit.languages import helpers
 from pre_commit.languages import node

tests/schema_test.py (new file, 391 lines)
@@ -0,0 +1,391 @@
from __future__ import absolute_import
from __future__ import unicode_literals

import json

import mock
import pytest

from pre_commit.schema import apply_defaults
from pre_commit.schema import Array
from pre_commit.schema import check_and
from pre_commit.schema import check_any
from pre_commit.schema import check_array
from pre_commit.schema import check_bool
from pre_commit.schema import check_regex
from pre_commit.schema import check_type
from pre_commit.schema import Conditional
from pre_commit.schema import load_from_filename
from pre_commit.schema import Map
from pre_commit.schema import MISSING
from pre_commit.schema import Not
from pre_commit.schema import Optional
from pre_commit.schema import OptionalNoDefault
from pre_commit.schema import remove_defaults
from pre_commit.schema import Required
from pre_commit.schema import RequiredRecurse
from pre_commit.schema import validate
from pre_commit.schema import ValidationError


def _assert_exception_trace(e, trace):
    inner = e
    for ctx in trace[:-1]:
        assert inner.ctx == ctx
        inner = inner.error_msg
    assert inner.error_msg == trace[-1]


def test_ValidationError_simple_str():
    assert str(ValidationError('error msg')) == (
        '\n'
        '=====> error msg'
    )


def test_ValidationError_nested():
    error = ValidationError(
        ValidationError(
            ValidationError('error msg'),
            ctx='At line 1',
        ),
        ctx='In file foo',
    )
    assert str(error) == (
        '\n'
        '==> In file foo\n'
        '==> At line 1\n'
        '=====> error msg'
    )


def test_check_regex():
    with pytest.raises(ValidationError) as excinfo:
        check_regex(str('('))
    assert excinfo.value.error_msg == "'(' is not a valid python regex"


def test_check_regex_ok():
    check_regex('^$')


def test_check_array_failed_inner_check():
    check = check_array(check_bool)
    with pytest.raises(ValidationError) as excinfo:
        check([True, False, 5])
    _assert_exception_trace(
        excinfo.value, ('At index 2', 'Expected bool got int'),
    )


def test_check_array_ok():
    check_array(check_bool)([True, False])


def test_check_and():
    check = check_and(check_type(str), check_regex)
    with pytest.raises(ValidationError) as excinfo:
        check(True)
    assert excinfo.value.error_msg == 'Expected str got bool'
    with pytest.raises(ValidationError) as excinfo:
        check(str('('))
    assert excinfo.value.error_msg == "'(' is not a valid python regex"


def test_check_and_ok():
    check = check_and(check_type(str), check_regex)
    check(str('^$'))


@pytest.mark.parametrize(
    ('val', 'expected'),
    (('bar', True), ('foo', False), (MISSING, False)),
)
def test_not(val, expected):
    compared = Not('foo')
    assert (val == compared) is expected
    assert (compared == val) is expected


trivial_array_schema = Array(Map('foo', 'id'))


def test_validate_top_level_array_not_an_array():
    with pytest.raises(ValidationError) as excinfo:
        validate({}, trivial_array_schema)
    assert excinfo.value.error_msg == "Expected array but got 'dict'"


def test_validate_top_level_array_no_objects():
    with pytest.raises(ValidationError) as excinfo:
        validate([], trivial_array_schema)
    assert excinfo.value.error_msg == "Expected at least 1 'foo'"


@pytest.mark.parametrize('v', (({},), [{}]))
def test_ok_both_types(v):
    validate(v, trivial_array_schema)


map_required = Map('foo', 'key', Required('key', check_bool))
map_optional = Map('foo', 'key', Optional('key', check_bool, False))
map_no_default = Map('foo', 'key', OptionalNoDefault('key', check_bool))


def test_map_wrong_type():
    with pytest.raises(ValidationError) as excinfo:
        validate([], map_required)
    assert excinfo.value.error_msg == 'Expected a foo map but got a list'


def test_required_missing_key():
    with pytest.raises(ValidationError) as excinfo:
        validate({}, map_required)
    _assert_exception_trace(
        excinfo.value, ('At foo(key=MISSING)', 'Missing required key: key'),
    )


@pytest.mark.parametrize(
    'schema', (map_required, map_optional, map_no_default),
)
def test_map_value_wrong_type(schema):
    with pytest.raises(ValidationError) as excinfo:
        validate({'key': 5}, schema)
    _assert_exception_trace(
        excinfo.value,
        ('At foo(key=5)', 'At key: key', 'Expected bool got int'),
    )


@pytest.mark.parametrize(
    'schema', (map_required, map_optional, map_no_default),
)
def test_map_value_correct_type(schema):
    validate({'key': True}, schema)


@pytest.mark.parametrize('schema', (map_optional, map_no_default))
def test_optional_key_missing(schema):
    validate({}, schema)


map_conditional = Map(
    'foo', 'key',
    Conditional(
        'key2', check_bool, condition_key='key', condition_value=True,
    ),
)
map_conditional_not = Map(
    'foo', 'key',
    Conditional(
        'key2', check_bool, condition_key='key', condition_value=Not(False),
    ),
)
map_conditional_absent = Map(
    'foo', 'key',
    Conditional(
        'key2', check_bool,
        condition_key='key', condition_value=True, ensure_absent=True,
    ),
)
map_conditional_absent_not = Map(
    'foo', 'key',
    Conditional(
        'key2', check_bool,
        condition_key='key', condition_value=Not(True), ensure_absent=True,
    ),
)


@pytest.mark.parametrize('schema', (map_conditional, map_conditional_not))
@pytest.mark.parametrize(
    'v',
    (
        # Conditional check passes, key2 is checked and passes
        {'key': True, 'key2': True},
        # Conditional check fails, key2 is not checked
        {'key': False, 'key2': 'ohai'},
    ),
)
def test_ok_conditional_schemas(v, schema):
    validate(v, schema)


@pytest.mark.parametrize('schema', (map_conditional, map_conditional_not))
def test_not_ok_conditional_schemas(schema):
    with pytest.raises(ValidationError) as excinfo:
        validate({'key': True, 'key2': 5}, schema)
    _assert_exception_trace(
        excinfo.value,
        ('At foo(key=True)', 'At key: key2', 'Expected bool got int'),
    )


def test_ensure_absent_conditional():
    with pytest.raises(ValidationError) as excinfo:
        validate({'key': False, 'key2': True}, map_conditional_absent)
    _assert_exception_trace(
        excinfo.value,
        (
            'At foo(key=False)',
            'Expected key2 to be absent when key is not True, '
            'found key2: True',
        ),
    )


def test_ensure_absent_conditional_not():
    with pytest.raises(ValidationError) as excinfo:
        validate({'key': True, 'key2': True}, map_conditional_absent_not)
    _assert_exception_trace(
        excinfo.value,
        (
            'At foo(key=True)',
            'Expected key2 to be absent when key is True, '
            'found key2: True',
        ),
    )


def test_no_error_conditional_absent():
    validate({}, map_conditional_absent)
    validate({}, map_conditional_absent_not)
    validate({'key2': True}, map_conditional_absent)
    validate({'key2': True}, map_conditional_absent_not)


def test_apply_defaults_copies_object():
    val = {}
    ret = apply_defaults(val, map_optional)
    assert ret is not val


def test_apply_defaults_sets_default():
    ret = apply_defaults({}, map_optional)
    assert ret == {'key': False}


def test_apply_defaults_does_not_change_non_default():
    ret = apply_defaults({'key': True}, map_optional)
    assert ret == {'key': True}


def test_apply_defaults_does_nothing_on_non_optional():
    ret = apply_defaults({}, map_required)
    assert ret == {}


def test_apply_defaults_map_in_list():
    ret = apply_defaults([{}], Array(map_optional))
    assert ret == [{'key': False}]


def test_remove_defaults_copies_object():
    val = {'key': False}
    ret = remove_defaults(val, map_optional)
    assert ret is not val


def test_remove_defaults_removes_defaults():
    ret = remove_defaults({'key': False}, map_optional)
    assert ret == {}


def test_remove_defaults_nothing_to_remove():
    ret = remove_defaults({}, map_optional)
    assert ret == {}


def test_remove_defaults_does_not_change_non_default():
    ret = remove_defaults({'key': True}, map_optional)
    assert ret == {'key': True}


def test_remove_defaults_map_in_list():
    ret = remove_defaults([{'key': False}], Array(map_optional))
    assert ret == [{}]


def test_remove_defaults_does_nothing_on_non_optional():
    ret = remove_defaults({'key': True}, map_required)
    assert ret == {'key': True}


nested_schema_required = Map(
    'Repository', 'repo',
    Required('repo', check_any),
    RequiredRecurse('hooks', Array(map_required)),
)
nested_schema_optional = Map(
    'Repository', 'repo',
    Required('repo', check_any),
    RequiredRecurse('hooks', Array(map_optional)),
)


def test_validate_failure_nested():
    with pytest.raises(ValidationError) as excinfo:
        validate({'repo': 1, 'hooks': [{}]}, nested_schema_required)
    _assert_exception_trace(
        excinfo.value,
        (
            'At Repository(repo=1)', 'At key: hooks', 'At foo(key=MISSING)',
            'Missing required key: key',
        ),
    )


def test_apply_defaults_nested():
    val = {'repo': 'repo1', 'hooks': [{}]}
    ret = apply_defaults(val, nested_schema_optional)
    assert ret == {'repo': 'repo1', 'hooks': [{'key': False}]}


def test_remove_defaults_nested():
    val = {'repo': 'repo1', 'hooks': [{'key': False}]}
    ret = remove_defaults(val, nested_schema_optional)
    assert ret == {'repo': 'repo1', 'hooks': [{}]}


class Error(Exception):
    pass


def test_load_from_filename_file_does_not_exist():
    with pytest.raises(Error) as excinfo:
        load_from_filename('does_not_exist', map_required, json.loads, Error)
    assert excinfo.value.args[0].error_msg == 'does_not_exist does not exist'


def test_load_from_filename_fails_load_strategy(tmpdir):
    f = tmpdir.join('foo.notjson')
    f.write('totes not json')
    with pytest.raises(Error) as excinfo:
        load_from_filename(f.strpath, map_required, json.loads, Error)
    _assert_exception_trace(
        excinfo.value.args[0],
        # ANY is json's error message
        ('File {}'.format(f.strpath), mock.ANY)
    )


def test_load_from_filename_validation_error(tmpdir):
    f = tmpdir.join('foo.json')
    f.write('{}')
    with pytest.raises(Error) as excinfo:
        load_from_filename(f.strpath, map_required, json.loads, Error)
    _assert_exception_trace(
        excinfo.value.args[0],
        (
            'File {}'.format(f.strpath), 'At foo(key=MISSING)',
            'Missing required key: key',
        ),
    )


def test_load_from_filename_applies_defaults(tmpdir):
    f = tmpdir.join('foo.json')
    f.write('{}')
    ret = load_from_filename(f.strpath, map_optional, json.loads, Error)
    assert ret == {'key': False}