mirror of
https://github.com/pre-commit/pre-commit.git
synced 2026-02-19 17:14:43 +04:00
Move pre_commit.schema to cfgv library
This commit is contained in:
parent
8bb4d63d3b
commit
bdad930d71
9 changed files with 58 additions and 791 deletions
|
|
@ -5,25 +5,18 @@ import argparse
|
||||||
import collections
|
import collections
|
||||||
import functools
|
import functools
|
||||||
|
|
||||||
|
import cfgv
|
||||||
from aspy.yaml import ordered_load
|
from aspy.yaml import ordered_load
|
||||||
from identify.identify import ALL_TAGS
|
from identify.identify import ALL_TAGS
|
||||||
|
|
||||||
import pre_commit.constants as C
|
import pre_commit.constants as C
|
||||||
from pre_commit import schema
|
|
||||||
from pre_commit.error_handler import FatalError
|
from pre_commit.error_handler import FatalError
|
||||||
from pre_commit.languages.all import all_languages
|
from pre_commit.languages.all import all_languages
|
||||||
|
|
||||||
|
|
||||||
def check_language(v):
|
|
||||||
if v not in all_languages:
|
|
||||||
raise schema.ValidationError(
|
|
||||||
'Expected {} to be in {!r}'.format(v, all_languages),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def check_type_tag(tag):
|
def check_type_tag(tag):
|
||||||
if tag not in ALL_TAGS:
|
if tag not in ALL_TAGS:
|
||||||
raise schema.ValidationError(
|
raise cfgv.ValidationError(
|
||||||
'Type tag {!r} is not recognized. '
|
'Type tag {!r} is not recognized. '
|
||||||
'Try upgrading identify and pre-commit?'.format(tag),
|
'Try upgrading identify and pre-commit?'.format(tag),
|
||||||
)
|
)
|
||||||
|
|
@ -36,41 +29,40 @@ def _make_argparser(filenames_help):
|
||||||
return parser
|
return parser
|
||||||
|
|
||||||
|
|
||||||
MANIFEST_HOOK_DICT = schema.Map(
|
MANIFEST_HOOK_DICT = cfgv.Map(
|
||||||
'Hook', 'id',
|
'Hook', 'id',
|
||||||
|
|
||||||
schema.Required('id', schema.check_string),
|
cfgv.Required('id', cfgv.check_string),
|
||||||
schema.Required('name', schema.check_string),
|
cfgv.Required('name', cfgv.check_string),
|
||||||
schema.Required('entry', schema.check_string),
|
cfgv.Required('entry', cfgv.check_string),
|
||||||
schema.Required(
|
cfgv.Required(
|
||||||
'language', schema.check_and(schema.check_string, check_language),
|
'language',
|
||||||
|
cfgv.check_and(cfgv.check_string, cfgv.check_one_of(all_languages)),
|
||||||
),
|
),
|
||||||
|
|
||||||
schema.Optional(
|
cfgv.Optional(
|
||||||
'files', schema.check_and(schema.check_string, schema.check_regex),
|
'files', cfgv.check_and(cfgv.check_string, cfgv.check_regex), '',
|
||||||
'',
|
|
||||||
),
|
),
|
||||||
schema.Optional(
|
cfgv.Optional(
|
||||||
'exclude', schema.check_and(schema.check_string, schema.check_regex),
|
'exclude', cfgv.check_and(cfgv.check_string, cfgv.check_regex), '^$',
|
||||||
'^$',
|
|
||||||
),
|
),
|
||||||
schema.Optional('types', schema.check_array(check_type_tag), ['file']),
|
cfgv.Optional('types', cfgv.check_array(check_type_tag), ['file']),
|
||||||
schema.Optional('exclude_types', schema.check_array(check_type_tag), []),
|
cfgv.Optional('exclude_types', cfgv.check_array(check_type_tag), []),
|
||||||
|
|
||||||
schema.Optional(
|
cfgv.Optional(
|
||||||
'additional_dependencies', schema.check_array(schema.check_string), [],
|
'additional_dependencies', cfgv.check_array(cfgv.check_string), [],
|
||||||
),
|
),
|
||||||
schema.Optional('args', schema.check_array(schema.check_string), []),
|
cfgv.Optional('args', cfgv.check_array(cfgv.check_string), []),
|
||||||
schema.Optional('always_run', schema.check_bool, False),
|
cfgv.Optional('always_run', cfgv.check_bool, False),
|
||||||
schema.Optional('pass_filenames', schema.check_bool, True),
|
cfgv.Optional('pass_filenames', cfgv.check_bool, True),
|
||||||
schema.Optional('description', schema.check_string, ''),
|
cfgv.Optional('description', cfgv.check_string, ''),
|
||||||
schema.Optional('language_version', schema.check_string, 'default'),
|
cfgv.Optional('language_version', cfgv.check_string, 'default'),
|
||||||
schema.Optional('log_file', schema.check_string, ''),
|
cfgv.Optional('log_file', cfgv.check_string, ''),
|
||||||
schema.Optional('minimum_pre_commit_version', schema.check_string, '0'),
|
cfgv.Optional('minimum_pre_commit_version', cfgv.check_string, '0'),
|
||||||
schema.Optional('stages', schema.check_array(schema.check_string), []),
|
cfgv.Optional('stages', cfgv.check_array(cfgv.check_string), []),
|
||||||
schema.Optional('verbose', schema.check_bool, False),
|
cfgv.Optional('verbose', cfgv.check_bool, False),
|
||||||
)
|
)
|
||||||
MANIFEST_SCHEMA = schema.Array(MANIFEST_HOOK_DICT)
|
MANIFEST_SCHEMA = cfgv.Array(MANIFEST_HOOK_DICT)
|
||||||
|
|
||||||
|
|
||||||
class InvalidManifestError(FatalError):
|
class InvalidManifestError(FatalError):
|
||||||
|
|
@ -78,7 +70,7 @@ class InvalidManifestError(FatalError):
|
||||||
|
|
||||||
|
|
||||||
load_manifest = functools.partial(
|
load_manifest = functools.partial(
|
||||||
schema.load_from_filename,
|
cfgv.load_from_filename,
|
||||||
schema=MANIFEST_SCHEMA,
|
schema=MANIFEST_SCHEMA,
|
||||||
load_strategy=ordered_load,
|
load_strategy=ordered_load,
|
||||||
exc_tp=InvalidManifestError,
|
exc_tp=InvalidManifestError,
|
||||||
|
|
@ -101,40 +93,40 @@ def validate_manifest_main(argv=None):
|
||||||
_LOCAL_SENTINEL = 'local'
|
_LOCAL_SENTINEL = 'local'
|
||||||
_META_SENTINEL = 'meta'
|
_META_SENTINEL = 'meta'
|
||||||
|
|
||||||
CONFIG_HOOK_DICT = schema.Map(
|
CONFIG_HOOK_DICT = cfgv.Map(
|
||||||
'Hook', 'id',
|
'Hook', 'id',
|
||||||
|
|
||||||
schema.Required('id', schema.check_string),
|
cfgv.Required('id', cfgv.check_string),
|
||||||
|
|
||||||
# All keys in manifest hook dict are valid in a config hook dict, but
|
# All keys in manifest hook dict are valid in a config hook dict, but
|
||||||
# are optional.
|
# are optional.
|
||||||
# No defaults are provided here as the config is merged on top of the
|
# No defaults are provided here as the config is merged on top of the
|
||||||
# manifest.
|
# manifest.
|
||||||
*[
|
*[
|
||||||
schema.OptionalNoDefault(item.key, item.check_fn)
|
cfgv.OptionalNoDefault(item.key, item.check_fn)
|
||||||
for item in MANIFEST_HOOK_DICT.items
|
for item in MANIFEST_HOOK_DICT.items
|
||||||
if item.key != 'id'
|
if item.key != 'id'
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
CONFIG_REPO_DICT = schema.Map(
|
CONFIG_REPO_DICT = cfgv.Map(
|
||||||
'Repository', 'repo',
|
'Repository', 'repo',
|
||||||
|
|
||||||
schema.Required('repo', schema.check_string),
|
cfgv.Required('repo', cfgv.check_string),
|
||||||
schema.RequiredRecurse('hooks', schema.Array(CONFIG_HOOK_DICT)),
|
cfgv.RequiredRecurse('hooks', cfgv.Array(CONFIG_HOOK_DICT)),
|
||||||
|
|
||||||
schema.Conditional(
|
cfgv.Conditional(
|
||||||
'sha', schema.check_string,
|
'sha', cfgv.check_string,
|
||||||
condition_key='repo',
|
condition_key='repo',
|
||||||
condition_value=schema.NotIn(_LOCAL_SENTINEL, _META_SENTINEL),
|
condition_value=cfgv.NotIn(_LOCAL_SENTINEL, _META_SENTINEL),
|
||||||
ensure_absent=True,
|
ensure_absent=True,
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
CONFIG_SCHEMA = schema.Map(
|
CONFIG_SCHEMA = cfgv.Map(
|
||||||
'Config', None,
|
'Config', None,
|
||||||
|
|
||||||
schema.RequiredRecurse('repos', schema.Array(CONFIG_REPO_DICT)),
|
cfgv.RequiredRecurse('repos', cfgv.Array(CONFIG_REPO_DICT)),
|
||||||
schema.Optional('exclude', schema.check_regex, '^$'),
|
cfgv.Optional('exclude', cfgv.check_regex, '^$'),
|
||||||
schema.Optional('fail_fast', schema.check_bool, False),
|
cfgv.Optional('fail_fast', cfgv.check_bool, False),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -160,7 +152,7 @@ def ordered_load_normalize_legacy_config(contents):
|
||||||
|
|
||||||
|
|
||||||
load_config = functools.partial(
|
load_config = functools.partial(
|
||||||
schema.load_from_filename,
|
cfgv.load_from_filename,
|
||||||
schema=CONFIG_SCHEMA,
|
schema=CONFIG_SCHEMA,
|
||||||
load_strategy=ordered_load_normalize_legacy_config,
|
load_strategy=ordered_load_normalize_legacy_config,
|
||||||
exc_tp=InvalidConfigError,
|
exc_tp=InvalidConfigError,
|
||||||
|
|
|
||||||
|
|
@ -6,6 +6,7 @@ from collections import OrderedDict
|
||||||
|
|
||||||
from aspy.yaml import ordered_dump
|
from aspy.yaml import ordered_dump
|
||||||
from aspy.yaml import ordered_load
|
from aspy.yaml import ordered_load
|
||||||
|
from cfgv import remove_defaults
|
||||||
|
|
||||||
import pre_commit.constants as C
|
import pre_commit.constants as C
|
||||||
from pre_commit import output
|
from pre_commit import output
|
||||||
|
|
@ -15,7 +16,6 @@ from pre_commit.clientlib import is_meta_repo
|
||||||
from pre_commit.clientlib import load_config
|
from pre_commit.clientlib import load_config
|
||||||
from pre_commit.commands.migrate_config import migrate_config
|
from pre_commit.commands.migrate_config import migrate_config
|
||||||
from pre_commit.repository import Repository
|
from pre_commit.repository import Repository
|
||||||
from pre_commit.schema import remove_defaults
|
|
||||||
from pre_commit.util import CalledProcessError
|
from pre_commit.util import CalledProcessError
|
||||||
from pre_commit.util import cmd_output
|
from pre_commit.util import cmd_output
|
||||||
from pre_commit.util import cwd
|
from pre_commit.util import cwd
|
||||||
|
|
|
||||||
|
|
@ -3,11 +3,12 @@ from __future__ import print_function
|
||||||
import argparse
|
import argparse
|
||||||
import re
|
import re
|
||||||
|
|
||||||
|
from cfgv import apply_defaults
|
||||||
|
|
||||||
import pre_commit.constants as C
|
import pre_commit.constants as C
|
||||||
from pre_commit import git
|
from pre_commit import git
|
||||||
from pre_commit.clientlib import load_config
|
from pre_commit.clientlib import load_config
|
||||||
from pre_commit.clientlib import MANIFEST_HOOK_DICT
|
from pre_commit.clientlib import MANIFEST_HOOK_DICT
|
||||||
from pre_commit.schema import apply_defaults
|
|
||||||
|
|
||||||
|
|
||||||
def exclude_matches_any(filenames, include, exclude):
|
def exclude_matches_any(filenames, include, exclude):
|
||||||
|
|
|
||||||
|
|
@ -11,6 +11,8 @@ from collections import defaultdict
|
||||||
|
|
||||||
import pkg_resources
|
import pkg_resources
|
||||||
from cached_property import cached_property
|
from cached_property import cached_property
|
||||||
|
from cfgv import apply_defaults
|
||||||
|
from cfgv import validate
|
||||||
|
|
||||||
import pre_commit.constants as C
|
import pre_commit.constants as C
|
||||||
from pre_commit import five
|
from pre_commit import five
|
||||||
|
|
@ -22,8 +24,6 @@ from pre_commit.clientlib import MANIFEST_HOOK_DICT
|
||||||
from pre_commit.languages.all import languages
|
from pre_commit.languages.all import languages
|
||||||
from pre_commit.languages.helpers import environment_dir
|
from pre_commit.languages.helpers import environment_dir
|
||||||
from pre_commit.prefix import Prefix
|
from pre_commit.prefix import Prefix
|
||||||
from pre_commit.schema import apply_defaults
|
|
||||||
from pre_commit.schema import validate
|
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger('pre_commit')
|
logger = logging.getLogger('pre_commit')
|
||||||
|
|
|
||||||
|
|
@ -1,291 +0,0 @@
|
||||||
from __future__ import absolute_import
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import collections
|
|
||||||
import contextlib
|
|
||||||
import io
|
|
||||||
import os.path
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
|
|
||||||
class ValidationError(ValueError):
|
|
||||||
def __init__(self, error_msg, ctx=None):
|
|
||||||
super(ValidationError, self).__init__(error_msg)
|
|
||||||
self.error_msg = error_msg
|
|
||||||
self.ctx = ctx
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
out = '\n'
|
|
||||||
err = self
|
|
||||||
while err.ctx is not None:
|
|
||||||
out += '==> {}\n'.format(err.ctx)
|
|
||||||
err = err.error_msg
|
|
||||||
out += '=====> {}'.format(err.error_msg)
|
|
||||||
return out
|
|
||||||
|
|
||||||
|
|
||||||
MISSING = collections.namedtuple('Missing', ())()
|
|
||||||
type(MISSING).__repr__ = lambda self: 'MISSING'
|
|
||||||
|
|
||||||
|
|
||||||
@contextlib.contextmanager
|
|
||||||
def validate_context(msg):
|
|
||||||
try:
|
|
||||||
yield
|
|
||||||
except ValidationError as e:
|
|
||||||
_, _, tb = sys.exc_info()
|
|
||||||
six.reraise(ValidationError, ValidationError(e, ctx=msg), tb)
|
|
||||||
|
|
||||||
|
|
||||||
@contextlib.contextmanager
|
|
||||||
def reraise_as(tp):
|
|
||||||
try:
|
|
||||||
yield
|
|
||||||
except ValidationError as e:
|
|
||||||
_, _, tb = sys.exc_info()
|
|
||||||
six.reraise(tp, tp(e), tb)
|
|
||||||
|
|
||||||
|
|
||||||
def _dct_noop(self, dct):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def _check_optional(self, dct):
|
|
||||||
if self.key not in dct:
|
|
||||||
return
|
|
||||||
with validate_context('At key: {}'.format(self.key)):
|
|
||||||
self.check_fn(dct[self.key])
|
|
||||||
|
|
||||||
|
|
||||||
def _apply_default_optional(self, dct):
|
|
||||||
dct.setdefault(self.key, self.default)
|
|
||||||
|
|
||||||
|
|
||||||
def _remove_default_optional(self, dct):
|
|
||||||
if dct.get(self.key, MISSING) == self.default:
|
|
||||||
del dct[self.key]
|
|
||||||
|
|
||||||
|
|
||||||
def _require_key(self, dct):
|
|
||||||
if self.key not in dct:
|
|
||||||
raise ValidationError('Missing required key: {}'.format(self.key))
|
|
||||||
|
|
||||||
|
|
||||||
def _check_required(self, dct):
|
|
||||||
_require_key(self, dct)
|
|
||||||
_check_optional(self, dct)
|
|
||||||
|
|
||||||
|
|
||||||
@property
|
|
||||||
def _check_fn_required_recurse(self):
|
|
||||||
def check_fn(val):
|
|
||||||
validate(val, self.schema)
|
|
||||||
return check_fn
|
|
||||||
|
|
||||||
|
|
||||||
def _apply_default_required_recurse(self, dct):
|
|
||||||
dct[self.key] = apply_defaults(dct[self.key], self.schema)
|
|
||||||
|
|
||||||
|
|
||||||
def _remove_default_required_recurse(self, dct):
|
|
||||||
dct[self.key] = remove_defaults(dct[self.key], self.schema)
|
|
||||||
|
|
||||||
|
|
||||||
def _check_conditional(self, dct):
|
|
||||||
if dct.get(self.condition_key, MISSING) == self.condition_value:
|
|
||||||
_check_required(self, dct)
|
|
||||||
elif self.condition_key in dct and self.ensure_absent and self.key in dct:
|
|
||||||
if isinstance(self.condition_value, Not):
|
|
||||||
op = 'is'
|
|
||||||
cond_val = self.condition_value.val
|
|
||||||
elif isinstance(self.condition_value, NotIn):
|
|
||||||
op = 'is any of'
|
|
||||||
cond_val = self.condition_value.values
|
|
||||||
else:
|
|
||||||
op = 'is not'
|
|
||||||
cond_val = self.condition_value
|
|
||||||
raise ValidationError(
|
|
||||||
'Expected {key} to be absent when {cond_key} {op} {cond_val!r}, '
|
|
||||||
'found {key}: {val!r}'.format(
|
|
||||||
key=self.key,
|
|
||||||
val=dct[self.key],
|
|
||||||
cond_key=self.condition_key,
|
|
||||||
op=op,
|
|
||||||
cond_val=cond_val,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
Required = collections.namedtuple('Required', ('key', 'check_fn'))
|
|
||||||
Required.check = _check_required
|
|
||||||
Required.apply_default = _dct_noop
|
|
||||||
Required.remove_default = _dct_noop
|
|
||||||
RequiredRecurse = collections.namedtuple('RequiredRecurse', ('key', 'schema'))
|
|
||||||
RequiredRecurse.check = _check_required
|
|
||||||
RequiredRecurse.check_fn = _check_fn_required_recurse
|
|
||||||
RequiredRecurse.apply_default = _apply_default_required_recurse
|
|
||||||
RequiredRecurse.remove_default = _remove_default_required_recurse
|
|
||||||
Optional = collections.namedtuple('Optional', ('key', 'check_fn', 'default'))
|
|
||||||
Optional.check = _check_optional
|
|
||||||
Optional.apply_default = _apply_default_optional
|
|
||||||
Optional.remove_default = _remove_default_optional
|
|
||||||
OptionalNoDefault = collections.namedtuple(
|
|
||||||
'OptionalNoDefault', ('key', 'check_fn'),
|
|
||||||
)
|
|
||||||
OptionalNoDefault.check = _check_optional
|
|
||||||
OptionalNoDefault.apply_default = _dct_noop
|
|
||||||
OptionalNoDefault.remove_default = _dct_noop
|
|
||||||
Conditional = collections.namedtuple(
|
|
||||||
'Conditional',
|
|
||||||
('key', 'check_fn', 'condition_key', 'condition_value', 'ensure_absent'),
|
|
||||||
)
|
|
||||||
Conditional.__new__.__defaults__ = (False,)
|
|
||||||
Conditional.check = _check_conditional
|
|
||||||
Conditional.apply_default = _dct_noop
|
|
||||||
Conditional.remove_default = _dct_noop
|
|
||||||
|
|
||||||
|
|
||||||
class Map(collections.namedtuple('Map', ('object_name', 'id_key', 'items'))):
|
|
||||||
__slots__ = ()
|
|
||||||
|
|
||||||
def __new__(cls, object_name, id_key, *items):
|
|
||||||
return super(Map, cls).__new__(cls, object_name, id_key, items)
|
|
||||||
|
|
||||||
def check(self, v):
|
|
||||||
if not isinstance(v, dict):
|
|
||||||
raise ValidationError('Expected a {} map but got a {}'.format(
|
|
||||||
self.object_name, type(v).__name__,
|
|
||||||
))
|
|
||||||
if self.id_key is None:
|
|
||||||
context = 'At {}()'.format(self.object_name)
|
|
||||||
else:
|
|
||||||
context = 'At {}({}={!r})'.format(
|
|
||||||
self.object_name, self.id_key, v.get(self.id_key, MISSING),
|
|
||||||
)
|
|
||||||
with validate_context(context):
|
|
||||||
for item in self.items:
|
|
||||||
item.check(v)
|
|
||||||
|
|
||||||
def apply_defaults(self, v):
|
|
||||||
ret = v.copy()
|
|
||||||
for item in self.items:
|
|
||||||
item.apply_default(ret)
|
|
||||||
return ret
|
|
||||||
|
|
||||||
def remove_defaults(self, v):
|
|
||||||
ret = v.copy()
|
|
||||||
for item in self.items:
|
|
||||||
item.remove_default(ret)
|
|
||||||
return ret
|
|
||||||
|
|
||||||
|
|
||||||
class Array(collections.namedtuple('Array', ('of',))):
|
|
||||||
__slots__ = ()
|
|
||||||
|
|
||||||
def check(self, v):
|
|
||||||
check_array(check_any)(v)
|
|
||||||
if not v:
|
|
||||||
raise ValidationError(
|
|
||||||
"Expected at least 1 '{}'".format(self.of.object_name),
|
|
||||||
)
|
|
||||||
for val in v:
|
|
||||||
validate(val, self.of)
|
|
||||||
|
|
||||||
def apply_defaults(self, v):
|
|
||||||
return [apply_defaults(val, self.of) for val in v]
|
|
||||||
|
|
||||||
def remove_defaults(self, v):
|
|
||||||
return [remove_defaults(val, self.of) for val in v]
|
|
||||||
|
|
||||||
|
|
||||||
class Not(collections.namedtuple('Not', ('val',))):
|
|
||||||
def __eq__(self, other):
|
|
||||||
return other is not MISSING and other != self.val
|
|
||||||
|
|
||||||
|
|
||||||
class NotIn(collections.namedtuple('NotIn', ('values',))):
|
|
||||||
def __new__(cls, *values):
|
|
||||||
return super(NotIn, cls).__new__(cls, values=values)
|
|
||||||
|
|
||||||
def __eq__(self, other):
|
|
||||||
return other is not MISSING and other not in self.values
|
|
||||||
|
|
||||||
|
|
||||||
def check_any(_):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def check_type(tp, typename=None):
|
|
||||||
def check_type_fn(v):
|
|
||||||
if not isinstance(v, tp):
|
|
||||||
raise ValidationError(
|
|
||||||
'Expected {} got {}'.format(
|
|
||||||
typename or tp.__name__, type(v).__name__,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
return check_type_fn
|
|
||||||
|
|
||||||
|
|
||||||
check_bool = check_type(bool)
|
|
||||||
check_string = check_type(six.string_types, typename='string')
|
|
||||||
|
|
||||||
|
|
||||||
def check_regex(v):
|
|
||||||
try:
|
|
||||||
re.compile(v)
|
|
||||||
except re.error:
|
|
||||||
raise ValidationError('{!r} is not a valid python regex'.format(v))
|
|
||||||
|
|
||||||
|
|
||||||
def check_array(inner_check):
|
|
||||||
def check_array_fn(v):
|
|
||||||
if not isinstance(v, (list, tuple)):
|
|
||||||
raise ValidationError(
|
|
||||||
'Expected array but got {!r}'.format(type(v).__name__),
|
|
||||||
)
|
|
||||||
|
|
||||||
for i, val in enumerate(v):
|
|
||||||
with validate_context('At index {}'.format(i)):
|
|
||||||
inner_check(val)
|
|
||||||
return check_array_fn
|
|
||||||
|
|
||||||
|
|
||||||
def check_and(*fns):
|
|
||||||
def check(v):
|
|
||||||
for fn in fns:
|
|
||||||
fn(v)
|
|
||||||
return check
|
|
||||||
|
|
||||||
|
|
||||||
def validate(v, schema):
|
|
||||||
schema.check(v)
|
|
||||||
return v
|
|
||||||
|
|
||||||
|
|
||||||
def apply_defaults(v, schema):
|
|
||||||
return schema.apply_defaults(v)
|
|
||||||
|
|
||||||
|
|
||||||
def remove_defaults(v, schema):
|
|
||||||
return schema.remove_defaults(v)
|
|
||||||
|
|
||||||
|
|
||||||
def load_from_filename(filename, schema, load_strategy, exc_tp):
|
|
||||||
with reraise_as(exc_tp):
|
|
||||||
if not os.path.exists(filename):
|
|
||||||
raise ValidationError('{} does not exist'.format(filename))
|
|
||||||
|
|
||||||
with io.open(filename) as f:
|
|
||||||
contents = f.read()
|
|
||||||
|
|
||||||
with validate_context('File {}'.format(filename)):
|
|
||||||
try:
|
|
||||||
data = load_strategy(contents)
|
|
||||||
except Exception as e:
|
|
||||||
raise ValidationError(str(e))
|
|
||||||
|
|
||||||
validate(data, schema)
|
|
||||||
return apply_defaults(data, schema)
|
|
||||||
1
setup.py
1
setup.py
|
|
@ -37,6 +37,7 @@ setup(
|
||||||
install_requires=[
|
install_requires=[
|
||||||
'aspy.yaml',
|
'aspy.yaml',
|
||||||
'cached-property',
|
'cached-property',
|
||||||
|
'cfgv>=1.0.0',
|
||||||
'identify>=1.0.0',
|
'identify>=1.0.0',
|
||||||
'nodeenv>=0.11.1',
|
'nodeenv>=0.11.1',
|
||||||
'pyyaml',
|
'pyyaml',
|
||||||
|
|
|
||||||
|
|
@ -8,13 +8,13 @@ from collections import OrderedDict
|
||||||
|
|
||||||
from aspy.yaml import ordered_dump
|
from aspy.yaml import ordered_dump
|
||||||
from aspy.yaml import ordered_load
|
from aspy.yaml import ordered_load
|
||||||
|
from cfgv import apply_defaults
|
||||||
|
from cfgv import validate
|
||||||
|
|
||||||
import pre_commit.constants as C
|
import pre_commit.constants as C
|
||||||
from pre_commit import git
|
from pre_commit import git
|
||||||
from pre_commit.clientlib import CONFIG_SCHEMA
|
from pre_commit.clientlib import CONFIG_SCHEMA
|
||||||
from pre_commit.clientlib import load_manifest
|
from pre_commit.clientlib import load_manifest
|
||||||
from pre_commit.schema import apply_defaults
|
|
||||||
from pre_commit.schema import validate
|
|
||||||
from pre_commit.util import cmd_output
|
from pre_commit.util import cmd_output
|
||||||
from pre_commit.util import copy_tree_to_path
|
from pre_commit.util import copy_tree_to_path
|
||||||
from pre_commit.util import cwd
|
from pre_commit.util import cwd
|
||||||
|
|
|
||||||
|
|
@ -1,9 +1,8 @@
|
||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
import cfgv
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from pre_commit import schema
|
|
||||||
from pre_commit.clientlib import check_language
|
|
||||||
from pre_commit.clientlib import check_type_tag
|
from pre_commit.clientlib import check_type_tag
|
||||||
from pre_commit.clientlib import CONFIG_HOOK_DICT
|
from pre_commit.clientlib import CONFIG_HOOK_DICT
|
||||||
from pre_commit.clientlib import CONFIG_SCHEMA
|
from pre_commit.clientlib import CONFIG_SCHEMA
|
||||||
|
|
@ -16,29 +15,18 @@ from testing.util import get_resource_path
|
||||||
|
|
||||||
def is_valid_according_to_schema(obj, obj_schema):
|
def is_valid_according_to_schema(obj, obj_schema):
|
||||||
try:
|
try:
|
||||||
schema.validate(obj, obj_schema)
|
cfgv.validate(obj, obj_schema)
|
||||||
return True
|
return True
|
||||||
except schema.ValidationError:
|
except cfgv.ValidationError:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('value', ('not a language', 'python3'))
|
|
||||||
def test_check_language_failures(value):
|
|
||||||
with pytest.raises(schema.ValidationError):
|
|
||||||
check_language(value)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('value', ('definitely-not-a-tag', 'fiel'))
|
@pytest.mark.parametrize('value', ('definitely-not-a-tag', 'fiel'))
|
||||||
def test_check_type_tag_failures(value):
|
def test_check_type_tag_failures(value):
|
||||||
with pytest.raises(schema.ValidationError):
|
with pytest.raises(cfgv.ValidationError):
|
||||||
check_type_tag(value)
|
check_type_tag(value)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('value', ('python', 'node', 'pcre'))
|
|
||||||
def test_check_language_ok(value):
|
|
||||||
check_language(value)
|
|
||||||
|
|
||||||
|
|
||||||
def test_is_local_repo():
|
def test_is_local_repo():
|
||||||
assert is_local_repo({'repo': 'local'})
|
assert is_local_repo({'repo': 'local'})
|
||||||
|
|
||||||
|
|
@ -58,7 +46,6 @@ def test_validate_config_main(args, expected_output):
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
('config_obj', 'expected'), (
|
('config_obj', 'expected'), (
|
||||||
([], False),
|
|
||||||
(
|
(
|
||||||
{'repos': [{
|
{'repos': [{
|
||||||
'repo': 'git@github.com:pre-commit/pre-commit-hooks',
|
'repo': 'git@github.com:pre-commit/pre-commit-hooks',
|
||||||
|
|
@ -116,8 +103,8 @@ def test_config_with_local_hooks_definition_fails():
|
||||||
'files': '^(.*)$',
|
'files': '^(.*)$',
|
||||||
}],
|
}],
|
||||||
}]}
|
}]}
|
||||||
with pytest.raises(schema.ValidationError):
|
with pytest.raises(cfgv.ValidationError):
|
||||||
schema.validate(config_obj, CONFIG_SCHEMA)
|
cfgv.validate(config_obj, CONFIG_SCHEMA)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
|
|
@ -147,7 +134,7 @@ def test_config_with_local_hooks_definition_fails():
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
def test_config_with_local_hooks_definition_passes(config_obj):
|
def test_config_with_local_hooks_definition_passes(config_obj):
|
||||||
schema.validate(config_obj, CONFIG_SCHEMA)
|
cfgv.validate(config_obj, CONFIG_SCHEMA)
|
||||||
|
|
||||||
|
|
||||||
def test_config_schema_does_not_contain_defaults():
|
def test_config_schema_does_not_contain_defaults():
|
||||||
|
|
@ -155,7 +142,7 @@ def test_config_schema_does_not_contain_defaults():
|
||||||
will clobber potentially useful values in the backing manifest. #227
|
will clobber potentially useful values in the backing manifest. #227
|
||||||
"""
|
"""
|
||||||
for item in CONFIG_HOOK_DICT.items:
|
for item in CONFIG_HOOK_DICT.items:
|
||||||
assert not isinstance(item, schema.Optional)
|
assert not isinstance(item, cfgv.Optional)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
|
|
@ -174,7 +161,6 @@ def test_validate_manifest_main(args, expected_output):
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
('manifest_obj', 'expected'),
|
('manifest_obj', 'expected'),
|
||||||
(
|
(
|
||||||
([], False),
|
|
||||||
(
|
(
|
||||||
[{
|
[{
|
||||||
'id': 'a',
|
'id': 'a',
|
||||||
|
|
|
||||||
|
|
@ -1,422 +0,0 @@
|
||||||
from __future__ import absolute_import
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import json
|
|
||||||
|
|
||||||
import mock
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from pre_commit.schema import apply_defaults
|
|
||||||
from pre_commit.schema import Array
|
|
||||||
from pre_commit.schema import check_and
|
|
||||||
from pre_commit.schema import check_any
|
|
||||||
from pre_commit.schema import check_array
|
|
||||||
from pre_commit.schema import check_bool
|
|
||||||
from pre_commit.schema import check_regex
|
|
||||||
from pre_commit.schema import check_type
|
|
||||||
from pre_commit.schema import Conditional
|
|
||||||
from pre_commit.schema import load_from_filename
|
|
||||||
from pre_commit.schema import Map
|
|
||||||
from pre_commit.schema import MISSING
|
|
||||||
from pre_commit.schema import Not
|
|
||||||
from pre_commit.schema import NotIn
|
|
||||||
from pre_commit.schema import Optional
|
|
||||||
from pre_commit.schema import OptionalNoDefault
|
|
||||||
from pre_commit.schema import remove_defaults
|
|
||||||
from pre_commit.schema import Required
|
|
||||||
from pre_commit.schema import RequiredRecurse
|
|
||||||
from pre_commit.schema import validate
|
|
||||||
from pre_commit.schema import ValidationError
|
|
||||||
|
|
||||||
|
|
||||||
def _assert_exception_trace(e, trace):
|
|
||||||
inner = e
|
|
||||||
for ctx in trace[:-1]:
|
|
||||||
assert inner.ctx == ctx
|
|
||||||
inner = inner.error_msg
|
|
||||||
assert inner.error_msg == trace[-1]
|
|
||||||
|
|
||||||
|
|
||||||
def test_ValidationError_simple_str():
|
|
||||||
assert str(ValidationError('error msg')) == (
|
|
||||||
'\n'
|
|
||||||
'=====> error msg'
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def test_ValidationError_nested():
|
|
||||||
error = ValidationError(
|
|
||||||
ValidationError(
|
|
||||||
ValidationError('error msg'),
|
|
||||||
ctx='At line 1',
|
|
||||||
),
|
|
||||||
ctx='In file foo',
|
|
||||||
)
|
|
||||||
assert str(error) == (
|
|
||||||
'\n'
|
|
||||||
'==> In file foo\n'
|
|
||||||
'==> At line 1\n'
|
|
||||||
'=====> error msg'
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def test_check_regex():
|
|
||||||
with pytest.raises(ValidationError) as excinfo:
|
|
||||||
check_regex(str('('))
|
|
||||||
assert excinfo.value.error_msg == "'(' is not a valid python regex"
|
|
||||||
|
|
||||||
|
|
||||||
def test_check_regex_ok():
|
|
||||||
check_regex('^$')
|
|
||||||
|
|
||||||
|
|
||||||
def test_check_array_failed_inner_check():
|
|
||||||
check = check_array(check_bool)
|
|
||||||
with pytest.raises(ValidationError) as excinfo:
|
|
||||||
check([True, False, 5])
|
|
||||||
_assert_exception_trace(
|
|
||||||
excinfo.value, ('At index 2', 'Expected bool got int'),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def test_check_array_ok():
|
|
||||||
check_array(check_bool)([True, False])
|
|
||||||
|
|
||||||
|
|
||||||
def test_check_and():
|
|
||||||
check = check_and(check_type(str), check_regex)
|
|
||||||
with pytest.raises(ValidationError) as excinfo:
|
|
||||||
check(True)
|
|
||||||
assert excinfo.value.error_msg == 'Expected str got bool'
|
|
||||||
with pytest.raises(ValidationError) as excinfo:
|
|
||||||
check(str('('))
|
|
||||||
assert excinfo.value.error_msg == "'(' is not a valid python regex"
|
|
||||||
|
|
||||||
|
|
||||||
def test_check_and_ok():
|
|
||||||
check = check_and(check_type(str), check_regex)
|
|
||||||
check(str('^$'))
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
('val', 'expected'),
|
|
||||||
(('bar', True), ('foo', False), (MISSING, False)),
|
|
||||||
)
|
|
||||||
def test_not(val, expected):
|
|
||||||
compared = Not('foo')
|
|
||||||
assert (val == compared) is expected
|
|
||||||
assert (compared == val) is expected
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
('values', 'expected'),
|
|
||||||
(('bar', True), ('foo', False), (MISSING, False)),
|
|
||||||
)
|
|
||||||
def test_not_in(values, expected):
|
|
||||||
compared = NotIn('baz', 'foo')
|
|
||||||
assert (values == compared) is expected
|
|
||||||
assert (compared == values) is expected
|
|
||||||
|
|
||||||
|
|
||||||
trivial_array_schema = Array(Map('foo', 'id'))
|
|
||||||
|
|
||||||
|
|
||||||
def test_validate_top_level_array_not_an_array():
|
|
||||||
with pytest.raises(ValidationError) as excinfo:
|
|
||||||
validate({}, trivial_array_schema)
|
|
||||||
assert excinfo.value.error_msg == "Expected array but got 'dict'"
|
|
||||||
|
|
||||||
|
|
||||||
def test_validate_top_level_array_no_objects():
|
|
||||||
with pytest.raises(ValidationError) as excinfo:
|
|
||||||
validate([], trivial_array_schema)
|
|
||||||
assert excinfo.value.error_msg == "Expected at least 1 'foo'"
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('v', (({},), [{}]))
|
|
||||||
def test_ok_both_types(v):
|
|
||||||
validate(v, trivial_array_schema)
|
|
||||||
|
|
||||||
|
|
||||||
map_required = Map('foo', 'key', Required('key', check_bool))
|
|
||||||
map_optional = Map('foo', 'key', Optional('key', check_bool, False))
|
|
||||||
map_no_default = Map('foo', 'key', OptionalNoDefault('key', check_bool))
|
|
||||||
|
|
||||||
|
|
||||||
def test_map_wrong_type():
    """A non-dict value fails map validation with a type message."""
    with pytest.raises(ValidationError) as exc:
        validate([], map_required)
    assert exc.value.error_msg == 'Expected a foo map but got a list'
|
|
||||||
|
|
||||||
|
|
||||||
def test_required_missing_key():
    """Omitting a Required key yields a missing-key error trace."""
    with pytest.raises(ValidationError) as exc:
        validate({}, map_required)
    expected_trace = ('At foo(key=MISSING)', 'Missing required key: key')
    _assert_exception_trace(exc.value, expected_trace)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
    'schema', (map_required, map_optional, map_no_default),
)
def test_map_value_wrong_type(schema):
    """A present key with a non-bool value fails under every flavor."""
    with pytest.raises(ValidationError) as exc:
        validate({'key': 5}, schema)
    expected_trace = (
        'At foo(key=5)', 'At key: key', 'Expected bool got int',
    )
    _assert_exception_trace(exc.value, expected_trace)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
    'schema', (map_required, map_optional, map_no_default),
)
def test_map_value_correct_type(schema):
    """A bool value for 'key' validates under every flavor."""
    validate({'key': True}, schema)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('schema', (map_optional, map_no_default))
def test_optional_key_missing(schema):
    """Optional keys may simply be absent."""
    validate({}, schema)
|
|
||||||
|
|
||||||
|
|
||||||
# Conditional schemas: 'key2' is either type-checked or required to be
# absent depending on the value of 'key'.
map_conditional = Map(
    'foo', 'key',
    Conditional(
        'key2', check_bool, condition_key='key', condition_value=True,
    ),
)
map_conditional_not = Map(
    'foo', 'key',
    Conditional(
        'key2', check_bool, condition_key='key', condition_value=Not(False),
    ),
)
map_conditional_absent = Map(
    'foo', 'key',
    Conditional(
        'key2', check_bool,
        condition_key='key', condition_value=True, ensure_absent=True,
    ),
)
map_conditional_absent_not = Map(
    'foo', 'key',
    Conditional(
        'key2', check_bool,
        condition_key='key', condition_value=Not(True), ensure_absent=True,
    ),
)
map_conditional_absent_not_in = Map(
    'foo', 'key',
    Conditional(
        'key2', check_bool,
        condition_key='key', condition_value=NotIn(1, 2), ensure_absent=True,
    ),
)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('schema', (map_conditional, map_conditional_not))
@pytest.mark.parametrize(
    'v',
    (
        # Condition holds, so key2 is validated (and is a valid bool).
        {'key': True, 'key2': True},
        # Condition does not hold, so key2 is never examined.
        {'key': False, 'key2': 'ohai'},
    ),
)
def test_ok_conditional_schemas(v, schema):
    """Conditionally-checked keys only matter when the condition holds."""
    validate(v, schema)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('schema', (map_conditional, map_conditional_not))
def test_not_ok_conditional_schemas(schema):
    """When the condition holds, key2 must still pass its check."""
    with pytest.raises(ValidationError) as exc:
        validate({'key': True, 'key2': 5}, schema)
    expected_trace = (
        'At foo(key=True)', 'At key: key2', 'Expected bool got int',
    )
    _assert_exception_trace(exc.value, expected_trace)
|
|
||||||
|
|
||||||
|
|
||||||
def test_ensure_absent_conditional():
    """ensure_absent forbids key2 when the condition value is not met."""
    with pytest.raises(ValidationError) as exc:
        validate({'key': False, 'key2': True}, map_conditional_absent)
    expected_trace = (
        'At foo(key=False)',
        'Expected key2 to be absent when key is not True, '
        'found key2: True',
    )
    _assert_exception_trace(exc.value, expected_trace)
|
|
||||||
|
|
||||||
|
|
||||||
def test_ensure_absent_conditional_not():
    """With a Not(...) condition, ensure_absent triggers on the match."""
    with pytest.raises(ValidationError) as exc:
        validate({'key': True, 'key2': True}, map_conditional_absent_not)
    expected_trace = (
        'At foo(key=True)',
        'Expected key2 to be absent when key is True, '
        'found key2: True',
    )
    _assert_exception_trace(exc.value, expected_trace)
|
|
||||||
|
|
||||||
|
|
||||||
def test_ensure_absent_conditional_not_in():
    """With a NotIn(...) condition, the error names the forbidden set."""
    with pytest.raises(ValidationError) as exc:
        validate({'key': 1, 'key2': True}, map_conditional_absent_not_in)
    expected_trace = (
        'At foo(key=1)',
        'Expected key2 to be absent when key is any of (1, 2), '
        'found key2: True',
    )
    _assert_exception_trace(exc.value, expected_trace)
|
|
||||||
|
|
||||||
|
|
||||||
def test_no_error_conditional_absent():
    """Without the condition key set, ensure_absent schemas accept key2."""
    for schema in (map_conditional_absent, map_conditional_absent_not):
        validate({}, schema)
        validate({'key2': True}, schema)
|
|
||||||
|
|
||||||
|
|
||||||
def test_apply_defaults_copies_object():
    """apply_defaults returns a new dict rather than mutating its input."""
    original = {}
    result = apply_defaults(original, map_optional)
    assert result is not original
|
|
||||||
|
|
||||||
|
|
||||||
def test_apply_defaults_sets_default():
    """A missing Optional key is filled in with its declared default."""
    assert apply_defaults({}, map_optional) == {'key': False}
|
|
||||||
|
|
||||||
|
|
||||||
def test_apply_defaults_does_not_change_non_default():
    """An explicitly-set, non-default value is left alone."""
    assert apply_defaults({'key': True}, map_optional) == {'key': True}
|
|
||||||
|
|
||||||
|
|
||||||
def test_apply_defaults_does_nothing_on_non_optional():
    """Required keys never receive a default value."""
    assert apply_defaults({}, map_required) == {}
|
|
||||||
|
|
||||||
|
|
||||||
def test_apply_defaults_map_in_list():
    """Defaults are applied recursively inside Array elements."""
    assert apply_defaults([{}], Array(map_optional)) == [{'key': False}]
|
|
||||||
|
|
||||||
|
|
||||||
def test_remove_defaults_copies_object():
    """remove_defaults returns a new dict rather than mutating its input."""
    original = {'key': False}
    result = remove_defaults(original, map_optional)
    assert result is not original
|
|
||||||
|
|
||||||
|
|
||||||
def test_remove_defaults_removes_defaults():
    """A value equal to the declared default is stripped."""
    assert remove_defaults({'key': False}, map_optional) == {}
|
|
||||||
|
|
||||||
|
|
||||||
def test_remove_defaults_nothing_to_remove():
    """An already-minimal dict passes through unchanged."""
    assert remove_defaults({}, map_optional) == {}
|
|
||||||
|
|
||||||
|
|
||||||
def test_remove_defaults_does_not_change_non_default():
    """A value that differs from the default is preserved."""
    assert remove_defaults({'key': True}, map_optional) == {'key': True}
|
|
||||||
|
|
||||||
|
|
||||||
def test_remove_defaults_map_in_list():
    """Default removal recurses into Array elements."""
    assert remove_defaults([{'key': False}], Array(map_optional)) == [{}]
|
|
||||||
|
|
||||||
|
|
||||||
def test_remove_defaults_does_nothing_on_non_optional():
    """Required keys are never treated as removable defaults."""
    assert remove_defaults({'key': True}, map_required) == {'key': True}
|
|
||||||
|
|
||||||
|
|
||||||
# Nested schemas: a 'Repository' map whose 'hooks' key recurses into an
# array of the single-key map schemas defined above.
nested_schema_required = Map(
    'Repository', 'repo',
    Required('repo', check_any),
    RequiredRecurse('hooks', Array(map_required)),
)
nested_schema_optional = Map(
    'Repository', 'repo',
    Required('repo', check_any),
    RequiredRecurse('hooks', Array(map_optional)),
)
|
|
||||||
|
|
||||||
|
|
||||||
def test_validate_failure_nested():
    """Errors inside recursed schemas carry the full context trace."""
    with pytest.raises(ValidationError) as exc:
        validate({'repo': 1, 'hooks': [{}]}, nested_schema_required)
    expected_trace = (
        'At Repository(repo=1)', 'At key: hooks', 'At foo(key=MISSING)',
        'Missing required key: key',
    )
    _assert_exception_trace(exc.value, expected_trace)
|
|
||||||
|
|
||||||
|
|
||||||
def test_apply_defaults_nested():
    """Defaults propagate through RequiredRecurse into nested maps."""
    value = {'repo': 'repo1', 'hooks': [{}]}
    assert apply_defaults(value, nested_schema_optional) == {
        'repo': 'repo1', 'hooks': [{'key': False}],
    }
|
|
||||||
|
|
||||||
|
|
||||||
def test_remove_defaults_nested():
    """Default removal propagates through RequiredRecurse."""
    value = {'repo': 'repo1', 'hooks': [{'key': False}]}
    assert remove_defaults(value, nested_schema_optional) == {
        'repo': 'repo1', 'hooks': [{}],
    }
|
|
||||||
|
|
||||||
|
|
||||||
class Error(Exception):
    """Wrapper exception type used by the load_from_filename tests."""
|
|
||||||
|
|
||||||
|
|
||||||
def test_load_from_filename_file_does_not_exist():
    """A nonexistent path is reported via the supplied exception type."""
    with pytest.raises(Error) as exc:
        load_from_filename('does_not_exist', map_required, json.loads, Error)
    wrapped = exc.value.args[0]
    assert wrapped.error_msg == 'does_not_exist does not exist'
|
|
||||||
|
|
||||||
|
|
||||||
def test_load_from_filename_fails_load_strategy(tmpdir):
    """A load-strategy failure is wrapped with the offending file's path."""
    src = tmpdir.join('foo.notjson')
    src.write('totes not json')
    with pytest.raises(Error) as exc:
        load_from_filename(src.strpath, map_required, json.loads, Error)
    _assert_exception_trace(
        exc.value.args[0],
        # mock.ANY matches json's own parse-error message.
        ('File {}'.format(src.strpath), mock.ANY),
    )
|
|
||||||
|
|
||||||
|
|
||||||
def test_load_from_filename_validation_error(tmpdir):
    """Schema violations in a loaded file include the file in the trace."""
    src = tmpdir.join('foo.json')
    src.write('{}')
    with pytest.raises(Error) as exc:
        load_from_filename(src.strpath, map_required, json.loads, Error)
    expected_trace = (
        'File {}'.format(src.strpath), 'At foo(key=MISSING)',
        'Missing required key: key',
    )
    _assert_exception_trace(exc.value.args[0], expected_trace)
|
|
||||||
|
|
||||||
|
|
||||||
def test_load_from_filename_applies_defaults(tmpdir):
    """Loading a file also applies schema defaults to the result."""
    src = tmpdir.join('foo.json')
    src.write('{}')
    result = load_from_filename(src.strpath, map_optional, json.loads, Error)
    assert result == {'key': False}
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue