mirror of
https://github.com/pre-commit/pre-commit.git
synced 2026-01-15 21:40:19 -06:00
Replace jsonschema with better error messages
This commit is contained in:
144
pre_commit/clientlib.py
Normal file
144
pre_commit/clientlib.py
Normal file
@@ -0,0 +1,144 @@
|
||||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import argparse
|
||||
import functools
|
||||
|
||||
from aspy.yaml import ordered_load
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit import schema
|
||||
from pre_commit.errors import FatalError
|
||||
from pre_commit.languages.all import all_languages
|
||||
|
||||
|
||||
def check_language(v):
    """Raise a ValidationError unless ``v`` names a supported language."""
    if v in all_languages:
        return
    raise schema.ValidationError(
        'Expected {} to be in {!r}'.format(v, all_languages),
    )
|
||||
|
||||
|
||||
def _make_argparser(filenames_help):
    """Build the shared CLI parser: positional filenames plus --version."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('filenames', nargs='*', help=filenames_help)
    arg_parser.add_argument(
        '-V', '--version', action='version', version=C.VERSION,
    )
    return arg_parser
|
||||
|
||||
|
||||
# Schema for a single hook entry in a repository's manifest
# (.pre-commit-hooks.yaml).  Required keys first, then the conditional
# `files` key, then optional keys (alphabetical) with their defaults.
MANIFEST_HOOK_DICT = schema.Map(
    'Hook', 'id',

    schema.Required('id', schema.check_string),
    schema.Required('name', schema.check_string),
    schema.Required('entry', schema.check_string),
    schema.Required(
        'language', schema.check_and(schema.check_string, check_language),
    ),

    # `files` is required unless the hook sets `always_run`, in which case
    # it must be absent.
    schema.Conditional(
        'files', schema.check_and(schema.check_string, schema.check_regex),
        condition_key='always_run', condition_value=False, ensure_absent=True,
    ),

    schema.Optional(
        'additional_dependencies', schema.check_array(schema.check_string), [],
    ),
    schema.Optional('args', schema.check_array(schema.check_string), []),
    schema.Optional('always_run', schema.check_bool, False),
    schema.Optional('description', schema.check_string, ''),
    schema.Optional(
        'exclude',
        schema.check_and(schema.check_string, schema.check_regex),
        '^$',
    ),
    schema.Optional('language_version', schema.check_string, 'default'),
    schema.Optional('minimum_pre_commit_version', schema.check_string, '0'),
    schema.Optional('stages', schema.check_array(schema.check_string), []),
)
# A manifest is a non-empty array of hook dicts.
MANIFEST_SCHEMA = schema.Array(MANIFEST_HOOK_DICT)
|
||||
|
||||
|
||||
class InvalidManifestError(FatalError):
    """Raised when a manifest file is missing, unparseable, or invalid."""
    pass


# Load + validate a manifest file, re-raising any failure as
# InvalidManifestError.
load_manifest = functools.partial(
    schema.load_from_filename,
    schema=MANIFEST_SCHEMA,
    load_strategy=ordered_load,
    exc_tp=InvalidManifestError,
)
|
||||
|
||||
|
||||
def validate_manifest_main(argv=None):
    """CLI entry point: validate each manifest file; return 0 ok / 1 bad."""
    args = _make_argparser('Manifest filenames.').parse_args(argv)
    exit_code = 0
    for manifest_path in args.filenames:
        try:
            load_manifest(manifest_path)
        except InvalidManifestError as exc:
            print(exc)
            exit_code = 1
    return exit_code
|
||||
|
||||
|
||||
# Magic `repo:` value marking an in-repo ("local") hook definition.
_LOCAL_SENTINEL = 'local'

CONFIG_HOOK_DICT = schema.Map(
    'Hook', 'id',

    schema.Required('id', schema.check_string),

    # All keys in manifest hook dict are valid in a config hook dict, but
    # are optional.
    # No defaults are provided here as the config is merged on top of the
    # manifest.
    *(
        schema.OptionalNoDefault(item.key, item.check_fn)
        for item in MANIFEST_HOOK_DICT.items
        if item.key != 'id'
    )
)
CONFIG_REPO_DICT = schema.Map(
    'Repository', 'repo',

    schema.Required('repo', schema.check_string),
    schema.RequiredRecurse('hooks', schema.Array(CONFIG_HOOK_DICT)),

    # Remote repos must pin a `sha`; local repos must not have one.
    schema.Conditional(
        'sha', schema.check_string,
        condition_key='repo', condition_value=schema.Not(_LOCAL_SENTINEL),
        ensure_absent=True,
    ),
)
CONFIG_SCHEMA = schema.Array(CONFIG_REPO_DICT)
|
||||
|
||||
|
||||
def is_local_repo(repo_entry):
    """Return True when the config entry uses the magic 'local' repo."""
    return repo_entry['repo'] == 'local'
|
||||
|
||||
|
||||
class InvalidConfigError(FatalError):
    """Raised when a config file is missing, unparseable, or invalid."""
    pass


# Load + validate a config file, re-raising any failure as
# InvalidConfigError.
load_config = functools.partial(
    schema.load_from_filename,
    schema=CONFIG_SCHEMA,
    load_strategy=ordered_load,
    exc_tp=InvalidConfigError,
)
|
||||
|
||||
|
||||
def validate_config_main(argv=None):
    """CLI entry point: validate each config file; return 0 ok / 1 bad."""
    args = _make_argparser('Config filenames.').parse_args(argv)
    exit_code = 0
    for config_path in args.filenames:
        try:
            load_config(config_path)
        except InvalidConfigError as exc:
            print(exc)
            exit_code = 1
    return exit_code
|
||||
@@ -1,88 +0,0 @@
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import argparse
|
||||
import os.path
|
||||
import re
|
||||
|
||||
import jsonschema
|
||||
import jsonschema.exceptions
|
||||
import yaml
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit import output
|
||||
from pre_commit.jsonschema_extensions import apply_defaults
|
||||
|
||||
|
||||
def is_regex_valid(regex):
|
||||
try:
|
||||
re.compile(regex)
|
||||
return True
|
||||
except re.error:
|
||||
return False
|
||||
|
||||
|
||||
def get_validator(
|
||||
json_schema,
|
||||
exception_type,
|
||||
additional_validation_strategy=lambda obj: None,
|
||||
):
|
||||
"""Returns a function which will validate a yaml file for correctness
|
||||
|
||||
Args:
|
||||
json_schema - JSON schema to validate file with
|
||||
exception_type - Error type to raise on failure
|
||||
additional_validation_strategy - Strategy for additional validation of
|
||||
the object read from the file. The function should either raise
|
||||
exception_type on failure.
|
||||
"""
|
||||
def validate(filename, load_strategy=yaml.load):
|
||||
if not os.path.exists(filename):
|
||||
raise exception_type('File {} does not exist'.format(filename))
|
||||
|
||||
file_contents = open(filename, 'r').read()
|
||||
|
||||
try:
|
||||
obj = load_strategy(file_contents)
|
||||
except Exception as e:
|
||||
raise exception_type(
|
||||
'Invalid yaml: {}\n{}'.format(os.path.relpath(filename), e),
|
||||
)
|
||||
|
||||
try:
|
||||
jsonschema.validate(obj, json_schema)
|
||||
except jsonschema.exceptions.ValidationError as e:
|
||||
raise exception_type(
|
||||
'Invalid content: {}\n{}'.format(
|
||||
os.path.relpath(filename), e
|
||||
),
|
||||
)
|
||||
|
||||
obj = apply_defaults(obj, json_schema)
|
||||
|
||||
additional_validation_strategy(obj)
|
||||
|
||||
return obj
|
||||
|
||||
return validate
|
||||
|
||||
|
||||
def get_run_function(filenames_help, validate_strategy, exception_cls):
|
||||
def run(argv=None):
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('filenames', nargs='*', help=filenames_help)
|
||||
parser.add_argument(
|
||||
'-V', '--version', action='version', version=C.VERSION,
|
||||
)
|
||||
|
||||
args = parser.parse_args(argv)
|
||||
|
||||
retval = 0
|
||||
for filename in args.filenames:
|
||||
try:
|
||||
validate_strategy(filename)
|
||||
except exception_cls as e:
|
||||
output.write_line(e.args[0])
|
||||
retval = 1
|
||||
return retval
|
||||
return run
|
||||
@@ -1,93 +0,0 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from pre_commit.clientlib.validate_base import get_run_function
|
||||
from pre_commit.clientlib.validate_base import get_validator
|
||||
from pre_commit.clientlib.validate_base import is_regex_valid
|
||||
from pre_commit.errors import FatalError
|
||||
|
||||
|
||||
_LOCAL_HOOKS_MAGIC_REPO_STRING = 'local'
|
||||
|
||||
|
||||
def is_local_hooks(repo_entry):
|
||||
return repo_entry['repo'] == _LOCAL_HOOKS_MAGIC_REPO_STRING
|
||||
|
||||
|
||||
class InvalidConfigError(FatalError):
|
||||
pass
|
||||
|
||||
|
||||
CONFIG_JSON_SCHEMA = {
|
||||
'type': 'array',
|
||||
'minItems': 1,
|
||||
'items': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'repo': {'type': 'string'},
|
||||
'sha': {'type': 'string'},
|
||||
'hooks': {
|
||||
'type': 'array',
|
||||
'minItems': 1,
|
||||
'items': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'id': {'type': 'string'},
|
||||
'always_run': {'type': 'boolean'},
|
||||
'files': {'type': 'string'},
|
||||
'exclude': {'type': 'string'},
|
||||
'language_version': {'type': 'string'},
|
||||
'args': {
|
||||
'type': 'array',
|
||||
'items': {'type': 'string'},
|
||||
},
|
||||
'additional_dependencies': {
|
||||
'type': 'array',
|
||||
'items': {'type': 'string'},
|
||||
},
|
||||
},
|
||||
'required': ['id'],
|
||||
}
|
||||
}
|
||||
},
|
||||
'required': ['repo', 'hooks'],
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def try_regex(repo, hook, value, field_name):
|
||||
if not is_regex_valid(value):
|
||||
raise InvalidConfigError(
|
||||
'Invalid {} regex at {}, {}: {}'.format(
|
||||
field_name, repo, hook, value,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def validate_config_extra(config):
|
||||
for repo in config:
|
||||
if is_local_hooks(repo):
|
||||
if 'sha' in repo:
|
||||
raise InvalidConfigError(
|
||||
'"sha" property provided for local hooks'
|
||||
)
|
||||
elif 'sha' not in repo:
|
||||
raise InvalidConfigError(
|
||||
'Missing "sha" field for repository {}'.format(repo['repo'])
|
||||
)
|
||||
for hook in repo['hooks']:
|
||||
try_regex(repo, hook['id'], hook.get('files', ''), 'files')
|
||||
try_regex(repo, hook['id'], hook.get('exclude', ''), 'exclude')
|
||||
|
||||
|
||||
load_config = get_validator(
|
||||
CONFIG_JSON_SCHEMA,
|
||||
InvalidConfigError,
|
||||
additional_validation_strategy=validate_config_extra,
|
||||
)
|
||||
|
||||
|
||||
run = get_run_function('Config filenames.', load_config, InvalidConfigError)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
exit(run())
|
||||
@@ -1,103 +0,0 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from pre_commit.clientlib.validate_base import get_run_function
|
||||
from pre_commit.clientlib.validate_base import get_validator
|
||||
from pre_commit.clientlib.validate_base import is_regex_valid
|
||||
from pre_commit.languages.all import all_languages
|
||||
|
||||
|
||||
class InvalidManifestError(ValueError):
|
||||
pass
|
||||
|
||||
|
||||
MANIFEST_JSON_SCHEMA = {
|
||||
'type': 'array',
|
||||
'minItems': 1,
|
||||
'items': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'id': {'type': 'string'},
|
||||
'always_run': {'type': 'boolean', 'default': False},
|
||||
'name': {'type': 'string'},
|
||||
'description': {'type': 'string', 'default': ''},
|
||||
'entry': {'type': 'string'},
|
||||
'exclude': {'type': 'string', 'default': '^$'},
|
||||
'language': {'type': 'string'},
|
||||
'language_version': {'type': 'string', 'default': 'default'},
|
||||
'minimum_pre_commit_version': {
|
||||
'type': 'string', 'default': '0.0.0',
|
||||
},
|
||||
'files': {'type': 'string'},
|
||||
'stages': {
|
||||
'type': 'array',
|
||||
'default': [],
|
||||
'items': {
|
||||
'type': 'string',
|
||||
},
|
||||
},
|
||||
'args': {
|
||||
'type': 'array',
|
||||
'default': [],
|
||||
'items': {
|
||||
'type': 'string',
|
||||
},
|
||||
},
|
||||
'additional_dependencies': {
|
||||
'type': 'array',
|
||||
'items': {'type': 'string'},
|
||||
},
|
||||
},
|
||||
'required': ['id', 'name', 'entry', 'language', 'files'],
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def validate_languages(hook_config):
|
||||
if hook_config['language'] not in all_languages:
|
||||
raise InvalidManifestError(
|
||||
'Expected language {} for {} to be one of {!r}'.format(
|
||||
hook_config['id'],
|
||||
hook_config['language'],
|
||||
all_languages,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def validate_files(hook_config):
|
||||
if not is_regex_valid(hook_config['files']):
|
||||
raise InvalidManifestError(
|
||||
'Invalid files regex at {}: {}'.format(
|
||||
hook_config['id'], hook_config['files'],
|
||||
)
|
||||
)
|
||||
|
||||
if not is_regex_valid(hook_config.get('exclude', '')):
|
||||
raise InvalidManifestError(
|
||||
'Invalid exclude regex at {}: {}'.format(
|
||||
hook_config['id'], hook_config['exclude'],
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def additional_manifest_check(obj):
|
||||
for hook_config in obj:
|
||||
validate_languages(hook_config)
|
||||
validate_files(hook_config)
|
||||
|
||||
|
||||
load_manifest = get_validator(
|
||||
MANIFEST_JSON_SCHEMA,
|
||||
InvalidManifestError,
|
||||
additional_manifest_check,
|
||||
)
|
||||
|
||||
|
||||
run = get_run_function(
|
||||
'Manifest filenames.',
|
||||
load_manifest,
|
||||
InvalidManifestError,
|
||||
)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
exit(run())
|
||||
@@ -8,11 +8,11 @@ from aspy.yaml import ordered_load
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit import output
|
||||
from pre_commit.clientlib.validate_config import CONFIG_JSON_SCHEMA
|
||||
from pre_commit.clientlib.validate_config import is_local_hooks
|
||||
from pre_commit.clientlib.validate_config import load_config
|
||||
from pre_commit.jsonschema_extensions import remove_defaults
|
||||
from pre_commit.clientlib import CONFIG_SCHEMA
|
||||
from pre_commit.clientlib import is_local_repo
|
||||
from pre_commit.clientlib import load_config
|
||||
from pre_commit.repository import Repository
|
||||
from pre_commit.schema import remove_defaults
|
||||
from pre_commit.util import CalledProcessError
|
||||
from pre_commit.util import cmd_output
|
||||
from pre_commit.util import cwd
|
||||
@@ -77,7 +77,7 @@ def autoupdate(runner, tags_only):
|
||||
)
|
||||
|
||||
for repo_config in input_configs:
|
||||
if is_local_hooks(repo_config):
|
||||
if is_local_repo(repo_config):
|
||||
output_configs.append(repo_config)
|
||||
continue
|
||||
output.write('Updating {}...'.format(repo_config['repo']))
|
||||
@@ -101,11 +101,9 @@ def autoupdate(runner, tags_only):
|
||||
|
||||
if changed:
|
||||
with open(runner.config_file_path, 'w') as config_file:
|
||||
config_file.write(
|
||||
ordered_dump(
|
||||
remove_defaults(output_configs, CONFIG_JSON_SCHEMA),
|
||||
**C.YAML_DUMP_KWARGS
|
||||
)
|
||||
)
|
||||
config_file.write(ordered_dump(
|
||||
remove_defaults(output_configs, CONFIG_SCHEMA),
|
||||
**C.YAML_DUMP_KWARGS
|
||||
))
|
||||
|
||||
return retv
|
||||
|
||||
@@ -5,14 +5,20 @@ PY3 = str is not bytes
|
||||
|
||||
if PY2: # pragma: no cover (PY2 only)
|
||||
text = unicode # flake8: noqa
|
||||
string_types = (text, bytes)
|
||||
|
||||
def n(s):
|
||||
if isinstance(s, bytes):
|
||||
return s
|
||||
else:
|
||||
return s.encode('UTF-8')
|
||||
|
||||
exec("""def reraise(tp, value, tb=None):
|
||||
raise tp, value, tb
|
||||
""")
|
||||
else: # pragma: no cover (PY3 only)
|
||||
text = str
|
||||
string_types = (text,)
|
||||
|
||||
def n(s):
|
||||
if isinstance(s, text):
|
||||
@@ -20,6 +26,13 @@ else: # pragma: no cover (PY3 only)
|
||||
else:
|
||||
return s.decode('UTF-8')
|
||||
|
||||
def reraise(tp, value, tb=None):
|
||||
if value is None:
|
||||
value = tp()
|
||||
if value.__traceback__ is not tb:
|
||||
raise value.with_traceback(tb)
|
||||
raise value
|
||||
|
||||
|
||||
def to_text(s):
|
||||
return s if isinstance(s, text) else s.decode('UTF-8')
|
||||
|
||||
@@ -1,57 +0,0 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import copy
|
||||
|
||||
import jsonschema
|
||||
import jsonschema.validators
|
||||
|
||||
|
||||
# From https://github.com/Julian/jsonschema/blob/master/docs/faq.rst
|
||||
def extend_validator_cls(validator_cls, modify):
|
||||
validate_properties = validator_cls.VALIDATORS['properties']
|
||||
|
||||
def new_properties(validator, properties, instance, schema):
|
||||
# Exhaust the validator
|
||||
list(validate_properties(validator, properties, instance, schema))
|
||||
modify(properties, instance)
|
||||
|
||||
return jsonschema.validators.extend(
|
||||
validator_cls, {'properties': new_properties},
|
||||
)
|
||||
|
||||
|
||||
def default_values(properties, instance):
|
||||
for prop, subschema in properties.items():
|
||||
if 'default' in subschema:
|
||||
instance.setdefault(
|
||||
prop, copy.deepcopy(subschema['default']),
|
||||
)
|
||||
|
||||
|
||||
def remove_default_values(properties, instance):
|
||||
for prop, subschema in properties.items():
|
||||
if (
|
||||
'default' in subschema and
|
||||
instance.get(prop) == subschema['default']
|
||||
):
|
||||
del instance[prop]
|
||||
|
||||
|
||||
_AddDefaultsValidator = extend_validator_cls(
|
||||
jsonschema.Draft4Validator, default_values,
|
||||
)
|
||||
_RemoveDefaultsValidator = extend_validator_cls(
|
||||
jsonschema.Draft4Validator, remove_default_values,
|
||||
)
|
||||
|
||||
|
||||
def apply_defaults(obj, schema):
|
||||
obj = copy.deepcopy(obj)
|
||||
_AddDefaultsValidator(schema).validate(obj)
|
||||
return obj
|
||||
|
||||
|
||||
def remove_defaults(obj, schema):
|
||||
obj = copy.deepcopy(obj)
|
||||
_RemoveDefaultsValidator(schema).validate(obj)
|
||||
return obj
|
||||
@@ -6,7 +6,7 @@ import os.path
|
||||
from cached_property import cached_property
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit.clientlib.validate_manifest import load_manifest
|
||||
from pre_commit.clientlib import load_manifest
|
||||
|
||||
|
||||
logger = logging.getLogger('pre_commit')
|
||||
|
||||
@@ -13,13 +13,14 @@ from cached_property import cached_property
|
||||
import pre_commit.constants as C
|
||||
from pre_commit import five
|
||||
from pre_commit import git
|
||||
from pre_commit.clientlib.validate_config import is_local_hooks
|
||||
from pre_commit.clientlib.validate_manifest import MANIFEST_JSON_SCHEMA
|
||||
from pre_commit.jsonschema_extensions import apply_defaults
|
||||
from pre_commit.clientlib import is_local_repo
|
||||
from pre_commit.clientlib import MANIFEST_HOOK_DICT
|
||||
from pre_commit.languages.all import languages
|
||||
from pre_commit.languages.helpers import environment_dir
|
||||
from pre_commit.manifest import Manifest
|
||||
from pre_commit.prefixed_command_runner import PrefixedCommandRunner
|
||||
from pre_commit.schema import apply_defaults
|
||||
from pre_commit.schema import validate
|
||||
|
||||
|
||||
logger = logging.getLogger('pre_commit')
|
||||
@@ -115,7 +116,7 @@ class Repository(object):
|
||||
|
||||
@classmethod
|
||||
def create(cls, config, store):
|
||||
if is_local_hooks(config):
|
||||
if is_local_repo(config):
|
||||
return LocalRepository(config, store)
|
||||
else:
|
||||
return cls(config, store)
|
||||
@@ -162,7 +163,7 @@ class Repository(object):
|
||||
deps_dict = defaultdict(_UniqueList)
|
||||
for _, hook in self.hooks:
|
||||
deps_dict[(hook['language'], hook['language_version'])].update(
|
||||
hook.get('additional_dependencies', []),
|
||||
hook['additional_dependencies'],
|
||||
)
|
||||
ret = []
|
||||
for (language, version), deps in deps_dict.items():
|
||||
@@ -182,7 +183,7 @@ class Repository(object):
|
||||
"""
|
||||
self.require_installed()
|
||||
language_name = hook['language']
|
||||
deps = hook.get('additional_dependencies', [])
|
||||
deps = hook['additional_dependencies']
|
||||
cmd_runner = self._cmd_runner_from_deps(language_name, deps)
|
||||
return languages[language_name].run_hook(cmd_runner, hook, file_args)
|
||||
|
||||
@@ -207,9 +208,12 @@ class LocalRepository(Repository):
|
||||
return tuple(
|
||||
(
|
||||
hook['id'],
|
||||
_validate_minimum_version(apply_defaults(
|
||||
hook, MANIFEST_JSON_SCHEMA['items'],
|
||||
)),
|
||||
_validate_minimum_version(
|
||||
apply_defaults(
|
||||
validate(hook, MANIFEST_HOOK_DICT),
|
||||
MANIFEST_HOOK_DICT,
|
||||
),
|
||||
),
|
||||
)
|
||||
for hook in self.repo_config['hooks']
|
||||
)
|
||||
@@ -220,7 +224,7 @@ class LocalRepository(Repository):
|
||||
for _, hook in self.hooks:
|
||||
language = hook['language']
|
||||
version = hook['language_version']
|
||||
deps = hook.get('additional_dependencies', [])
|
||||
deps = hook['additional_dependencies']
|
||||
ret.append((
|
||||
self._cmd_runner_from_deps(language, deps),
|
||||
language, version, deps,
|
||||
|
||||
@@ -5,7 +5,7 @@ import os.path
|
||||
from cached_property import cached_property
|
||||
|
||||
from pre_commit import git
|
||||
from pre_commit.clientlib.validate_config import load_config
|
||||
from pre_commit.clientlib import load_config
|
||||
from pre_commit.repository import Repository
|
||||
from pre_commit.store import Store
|
||||
|
||||
|
||||
279
pre_commit/schema.py
Normal file
279
pre_commit/schema.py
Normal file
@@ -0,0 +1,279 @@
|
||||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import collections
|
||||
import contextlib
|
||||
import io
|
||||
import os.path
|
||||
import re
|
||||
import sys
|
||||
|
||||
from pre_commit import five
|
||||
|
||||
|
||||
class ValidationError(ValueError):
    """A schema validation failure.

    Errors nest: `error_msg` is either a plain message (innermost error)
    or another ValidationError, with `ctx` describing where in the
    document the nested error occurred.  __str__ renders the chain as an
    indented breadcrumb trail.
    """

    def __init__(self, error_msg, ctx=None):
        super(ValidationError, self).__init__(error_msg)
        self.error_msg = error_msg
        self.ctx = ctx

    def __str__(self):
        # Walk outward-in: each context level gets a '==>' line, the
        # innermost message gets '=====>'.
        parts = ['']
        err = self
        while err.ctx is not None:
            parts.append('==> {}'.format(err.ctx))
            err = err.error_msg
        return '\n'.join(parts) + '\n=====> {}'.format(err.error_msg)
||||
|
||||
|
||||
class _Missing(collections.namedtuple('Missing', ())):
    """Sentinel distinguishing 'key absent' from any real value."""
    __slots__ = ()

    def __repr__(self):
        return 'MISSING'


MISSING = _Missing()
|
||||
|
||||
|
||||
@contextlib.contextmanager
def validate_context(msg):
    """Wrap any ValidationError raised inside with context `msg`.

    The original traceback is preserved via five.reraise.
    """
    try:
        yield
    except ValidationError as e:
        _, _, tb = sys.exc_info()
        five.reraise(ValidationError, ValidationError(e, ctx=msg), tb)
|
||||
|
||||
|
||||
@contextlib.contextmanager
def reraise_as(tp):
    """Re-raise any ValidationError inside as exception type `tp`.

    The original traceback is preserved via five.reraise.
    """
    try:
        yield
    except ValidationError as e:
        _, _, tb = sys.exc_info()
        five.reraise(tp, tp(e), tb)
|
||||
|
||||
|
||||
def _dct_noop(self, dct):
|
||||
pass
|
||||
|
||||
|
||||
def _check_optional(self, dct):
|
||||
if self.key not in dct:
|
||||
return
|
||||
with validate_context('At key: {}'.format(self.key)):
|
||||
self.check_fn(dct[self.key])
|
||||
|
||||
|
||||
def _apply_default_optional(self, dct):
|
||||
dct.setdefault(self.key, self.default)
|
||||
|
||||
|
||||
def _remove_default_optional(self, dct):
|
||||
if dct.get(self.key, MISSING) == self.default:
|
||||
del dct[self.key]
|
||||
|
||||
|
||||
def _require_key(self, dct):
|
||||
if self.key not in dct:
|
||||
raise ValidationError('Missing required key: {}'.format(self.key))
|
||||
|
||||
|
||||
def _check_required(self, dct):
|
||||
_require_key(self, dct)
|
||||
_check_optional(self, dct)
|
||||
|
||||
|
||||
@property
def _check_fn_required_recurse(self):
    """Exposed as `check_fn` on RequiredRecurse: validate the nested
    value against the nested schema."""
    def check_fn(val):
        validate(val, self.schema)
    return check_fn


def _apply_default_required_recurse(self, dct):
    """Apply the nested schema's defaults to the nested value in place."""
    dct[self.key] = apply_defaults(dct[self.key], self.schema)


def _remove_default_required_recurse(self, dct):
    """Remove the nested schema's defaults from the nested value in place."""
    dct[self.key] = remove_defaults(dct[self.key], self.schema)
|
||||
|
||||
|
||||
def _check_conditional(self, dct):
    """Require self.key when the condition holds; with ensure_absent,
    forbid it when the condition key is present but does not match."""
    if dct.get(self.condition_key, MISSING) == self.condition_value:
        _check_required(self, dct)
    elif self.condition_key in dct and self.ensure_absent and self.key in dct:
        # Phrase the message in terms of the underlying value when the
        # condition is a Not(...) wrapper.
        if isinstance(self.condition_value, Not):
            op, cond_val = 'is', self.condition_value.val
        else:
            op, cond_val = 'is not', self.condition_value
        raise ValidationError(
            'Expected {key} to be absent when {cond_key} {op} {cond_val!r}, '
            'found {key}: {val!r}'.format(
                key=self.key,
                val=dct[self.key],
                cond_key=self.condition_key,
                op=op,
                cond_val=cond_val,
            )
        )
|
||||
|
||||
|
||||
# Schema item types.  Each is a namedtuple carrying its configuration,
# with `check` / `apply_default` / `remove_default` behaviors attached
# as functions defined above.

Required = collections.namedtuple('Required', ('key', 'check_fn'))
Required.check = _check_required
Required.apply_default = _dct_noop
Required.remove_default = _dct_noop

RequiredRecurse = collections.namedtuple('RequiredRecurse', ('key', 'schema'))
RequiredRecurse.check = _check_required
RequiredRecurse.check_fn = _check_fn_required_recurse
RequiredRecurse.apply_default = _apply_default_required_recurse
RequiredRecurse.remove_default = _remove_default_required_recurse

Optional = collections.namedtuple('Optional', ('key', 'check_fn', 'default'))
Optional.check = _check_optional
Optional.apply_default = _apply_default_optional
Optional.remove_default = _remove_default_optional

OptionalNoDefault = collections.namedtuple(
    'OptionalNoDefault', ('key', 'check_fn'),
)
OptionalNoDefault.check = _check_optional
OptionalNoDefault.apply_default = _dct_noop
OptionalNoDefault.remove_default = _dct_noop

Conditional = collections.namedtuple(
    'Conditional',
    ('key', 'check_fn', 'condition_key', 'condition_value', 'ensure_absent'),
)
# Only the trailing field (ensure_absent) gets a default: False.
Conditional.__new__.__defaults__ = (False,)
Conditional.check = _check_conditional
Conditional.apply_default = _dct_noop
Conditional.remove_default = _dct_noop
|
||||
|
||||
|
||||
class Map(collections.namedtuple('Map', ('object_name', 'id_key', 'items'))):
    """Schema for a dict: a named object, its identifying key, and the
    schema items (Required/Optional/Conditional/...) applied to it."""
    __slots__ = ()

    def __new__(cls, object_name, id_key, *items):
        # Collect the variadic schema items into the single tuple field.
        return super(Map, cls).__new__(cls, object_name, id_key, items)

    def check(self, v):
        if not isinstance(v, dict):
            raise ValidationError('Expected a {} map but got a {}'.format(
                self.object_name, type(v).__name__,
            ))
        # Errors from item checks gain a "which object" context line.
        ctx = 'At {}({}={!r})'.format(
            self.object_name, self.id_key, v.get(self.id_key, MISSING),
        )
        with validate_context(ctx):
            for schema_item in self.items:
                schema_item.check(v)

    def apply_defaults(self, v):
        defaulted = v.copy()
        for schema_item in self.items:
            schema_item.apply_default(defaulted)
        return defaulted

    def remove_defaults(self, v):
        stripped = v.copy()
        for schema_item in self.items:
            schema_item.remove_default(stripped)
        return stripped
|
||||
|
||||
|
||||
class Array(collections.namedtuple('Array', ('of',))):
    """Schema for a non-empty list whose elements all match `of` (a Map)."""
    __slots__ = ()

    def check(self, v):
        # Type-check as a generic array first, then require at least one
        # element, then validate each element against the inner schema.
        check_array(check_any)(v)
        if not v:
            raise ValidationError(
                "Expected at least 1 '{}'".format(self.of.object_name),
            )
        for element in v:
            validate(element, self.of)

    def apply_defaults(self, v):
        return [apply_defaults(element, self.of) for element in v]

    def remove_defaults(self, v):
        return [remove_defaults(element, self.of) for element in v]
|
||||
|
||||
|
||||
class Not(object):
    """Condition value matching any *present* value other than `val`."""

    def __init__(self, val):
        self.val = val

    def __eq__(self, other):
        # Never match the absent-key sentinel.
        if other is MISSING:
            return False
        return other != self.val
|
||||
|
||||
|
||||
def check_any(_):
    """Accept any value (no-op check)."""
    pass
|
||||
|
||||
|
||||
def check_type(tp, typename=None):
    """Build a check that the value is an instance of `tp`.

    `typename` overrides the displayed type name (needed when `tp` is a
    tuple of types, which has no __name__).
    """
    def check_type_fn(v):
        if isinstance(v, tp):
            return
        raise ValidationError(
            'Expected {} got {}'.format(
                typename or tp.__name__, type(v).__name__,
            ),
        )
    return check_type_fn
|
||||
|
||||
|
||||
# Common primitive checks.  `string_types` covers both text and bytes on
# Python 2, text only on Python 3.
check_bool = check_type(bool)
check_string = check_type(five.string_types, typename='string')
|
||||
|
||||
|
||||
def check_regex(v):
    """Validate that `v` compiles as a Python regular expression."""
    try:
        re.compile(v)
    except re.error:
        raise ValidationError('{!r} is not a valid python regex'.format(v))
|
||||
|
||||
|
||||
def check_array(inner_check):
    """Build a check that the value is a list/tuple whose every element
    passes `inner_check`; element errors gain an 'At index i' context."""
    def check_array_fn(v):
        if not isinstance(v, (list, tuple)):
            raise ValidationError(
                'Expected array but got {!r}'.format(type(v).__name__),
            )
        for index, element in enumerate(v):
            with validate_context('At index {}'.format(index)):
                inner_check(element)
    return check_array_fn
|
||||
|
||||
|
||||
def check_and(*fns):
    """Compose checks: run each in order, failing on the first error."""
    def check(v):
        for check_fn in fns:
            check_fn(v)
    return check
|
||||
|
||||
|
||||
def validate(v, schema):
    """Check `v` against `schema`, returning it unchanged on success."""
    schema.check(v)
    return v


def apply_defaults(v, schema):
    """Return a copy of `v` with the schema's default values filled in."""
    return schema.apply_defaults(v)


def remove_defaults(v, schema):
    """Return a copy of `v` with values equal to schema defaults removed."""
    return schema.remove_defaults(v)
|
||||
|
||||
|
||||
def load_from_filename(filename, schema, load_strategy, exc_tp):
    """Load `filename` with `load_strategy`, validate against `schema`,
    and return the parsed value with the schema's defaults applied.

    Any failure — missing file, parse error, or validation error — is
    re-raised as `exc_tp`.
    """
    with reraise_as(exc_tp):
        if not os.path.exists(filename):
            raise ValidationError('{} does not exist'.format(filename))

        # Decode explicitly as UTF-8: the io.open() default is the locale
        # encoding, which is not reliable for config files shared across
        # machines.
        with io.open(filename, encoding='UTF-8') as f:
            contents = f.read()

        # Errors below gain a 'File <name>' context line.
        with validate_context('File {}'.format(filename)):
            try:
                data = load_strategy(contents)
            except Exception as e:
                raise ValidationError(str(e))

            validate(data, schema)
            return apply_defaults(data, schema)
|
||||
Reference in New Issue
Block a user