From 1999e7b3f51170648b070e4da69dae890ec72feb Mon Sep 17 00:00:00 2001 From: Seth Elliott Date: Wed, 29 Jul 2020 14:53:42 -0500 Subject: [PATCH 01/10] Update base project files for Conformity 2.0 (removes support for Python 2) --- Dockerfile | 4 ---- conftest.py | 5 ----- setup.cfg | 5 ++--- setup.py | 20 +++----------------- tasks.py | 5 ----- tox.ini | 13 ++----------- 6 files changed, 7 insertions(+), 45 deletions(-) diff --git a/Dockerfile b/Dockerfile index e298d38..c12ef6d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -9,10 +9,6 @@ RUN apt-get update && \ RUN add-apt-repository ppa:deadsnakes/ppa && \ apt-get update && \ apt-get install -y \ - python2.7 \ - python2.7-dev \ - python3.4 \ - python3.4-dev \ python3.5 \ python3.5-dev \ python3.6 \ diff --git a/conftest.py b/conftest.py index e8dd3ac..0236ca0 100644 --- a/conftest.py +++ b/conftest.py @@ -1,8 +1,3 @@ -from __future__ import ( - absolute_import, - unicode_literals, -) - import sys diff --git a/setup.cfg b/setup.cfg index fc90c50..784dbf6 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,5 +1,5 @@ [bdist_wheel] -python-tag=py27.py35.py36.py37.py38 +python-tag=py35.py36.py37.py38 [metadata] license_file = LICENSE @@ -21,12 +21,11 @@ force_sort_within_sections = 1 lines_after_imports = 2 skip_glob=*.git/*,*.env/*,*/docs/*,*/build/*,*/.eggs/*,*.egg-info/*,.tox not_skip=__init__.py -add_imports=__future__.absolute_import,__future__.unicode_literals # Section ordering sections=FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,CURRENT_PROJECT,LOCALFOLDER,TESTS no_lines_before=LOCALFOLDER # Section for third party packages -known_third_party=attr,currint,dateutil,freezegun,py,pycountry,pytest,pytz,six +known_third_party=dateutil,freezegun,py,pycountry,pytest,pytz,six # Section for specific project imports known_current_project=conformity known_tests=tests diff --git a/setup.py b/setup.py index a0ae8ee..46bdc27 100644 --- a/setup.py +++ b/setup.py @@ -1,8 +1,3 @@ -from __future__ import ( - absolute_import, - unicode_literals, -) - import sys from setuptools import ( @@ -18,10 +13,6 @@ def readme(): return f.read() -currency_requires = [ - 'currint', -] - country_requires = [ 'pycountry<19.7.15;python_version<"3"', 'pycountry>=19.7.15;python_version>="3"', @@ -35,11 +26,11 @@ def readme(): 'freezegun', 'mock;python_version<"3.3"', 'mypy~=0.740;python_version>"3.4"', - 'pytest>4.2,<5.4', + 'pytest', 'pytest-cov', 'pytest-runner', 'pytz', -] + currency_requires + country_requires + spinx_requires +] + country_requires + spinx_requires setup( name='conformity', @@ -57,18 +48,15 @@ def readme(): zip_safe=False, # PEP 561 include_package_data=True, install_requires=[ - 'attrs>=17.4,<20', - 'six', 'typing~=3.7.4;python_version<"3.5"', ], tests_require=tests_require, setup_requires=['pytest-runner'] if {'pytest', 'test', 'ptr'}.intersection(sys.argv) else [], test_suite='tests', extras_require={ - 'currency': currency_requires, 'country': country_requires, 'sphinx': spinx_requires, - 'docs': spinx_requires + country_requires + currency_requires, + 'docs': spinx_requires + country_requires, 'testing': tests_require, }, license='Apache 2.0', @@ -78,8 +66,6 @@ def readme(): 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', diff --git a/tasks.py b/tasks.py index 164f0cd..67e9fca 100644 
--- a/tasks.py +++ b/tasks.py @@ -1,8 +1,3 @@ -from __future__ import ( - absolute_import, - unicode_literals, -) - from invoke_release.tasks import * # noqa: F403 diff --git a/tox.ini b/tox.ini index c8b5ded..b8a8491 100644 --- a/tox.ini +++ b/tox.ini @@ -1,8 +1,7 @@ [tox] envlist = - py{27,35,36,37,38} - py37-attrs{17,18,19} - py{27,37}-flake8 + py{35,36,37,38} + py{37}-flake8 coverage py{37,38}-mypy @@ -10,18 +9,10 @@ envlist = usedevelop=True deps = .[testing] - attrs17: attrs~=17.4 - attrs18: attrs~=18.2 - attrs19: attrs~=19.1 # ipdb commands = pytest --cov-append --cov-fail-under=1 --cov-report= -[testenv:py27-flake8] -skip_install = true -deps = flake8 -commands = flake8 - [testenv:py37-flake8] skip_install = true deps = flake8 From 883bd6b80c53d5f0bbb78ee1854c09e0979edba6 Mon Sep 17 00:00:00 2001 From: Seth Elliott Date: Wed, 29 Jul 2020 15:37:30 -0500 Subject: [PATCH 02/10] Modify conformity package base to support Python 3+ only. Remove unused utils and currency fields. --- conformity/__init__.py | 5 - conformity/constants.py | 6 - conformity/decorators.py | 28 +++ conformity/error.py | 44 +--- conformity/fields/__init__.py | 5 - conformity/fields/currency.py | 350 -------------------------------- conformity/settings/__init__.py | 63 +++--- conformity/types.py | 54 ++--- conformity/utils.py | 141 ------------- conformity/version.py | 6 - 10 files changed, 86 insertions(+), 616 deletions(-) create mode 100644 conformity/decorators.py delete mode 100644 conformity/fields/currency.py delete mode 100644 conformity/utils.py diff --git a/conformity/__init__.py b/conformity/__init__.py index aff0aab..2b99bdc 100644 --- a/conformity/__init__.py +++ b/conformity/__init__.py @@ -1,8 +1,3 @@ -from __future__ import ( - absolute_import, - unicode_literals, -) - from conformity.version import ( __version__, __version_info__, diff --git a/conformity/constants.py b/conformity/constants.py index 31f0ba4..6b6ba43 100644 --- a/conformity/constants.py +++ b/conformity/constants.py @@ -1,9 +1,3 @@ -from __future__ import ( - absolute_import, - unicode_literals, -) - - # Error codes ERROR_CODE_INVALID = 'INVALID' ERROR_CODE_MISSING = 'MISSING' diff --git a/conformity/decorators.py b/conformity/decorators.py new file mode 100644 index 0000000..3678be0 --- /dev/null +++ b/conformity/decorators.py @@ -0,0 +1,28 @@ +from functools import wraps +import typing + +from conformity.fields.base import BaseField + + +T = typing.TypeVar('T', bound=typing.Callable[..., typing.Any]) + + +def validate(func: T) -> T: + type_hints = typing.get_type_hints(func) + + # Collect Conformity fields from annotations + fields = {} + for arg, hint in type_hints.items(): + if issubclass(hint, BaseField): + fields[arg] = hint + + @wraps(func) + def wrapped(**kwargs): + # TODO: Add support for positional arguments + for key, value in kwargs.items(): + field = fields.get(key) + if field is not None: + field.errors(value) + return func(**kwargs) + + return typing.cast(T, wrapped) diff --git a/conformity/error.py b/conformity/error.py index 4b97b70..38a5f5c 100644 --- a/conformity/error.py +++ b/conformity/error.py @@ -1,32 +1,9 @@ -from __future__ import ( - absolute_import, - unicode_literals, -) - from typing import cast -import warnings - -import six - -# NOTE: The following have been moved to different modules, but are imported -# here for backwards compatibility. These aliases will be removed in -# Conformity 2.0. 
-from conformity.constants import ( - ERROR_CODE_INVALID, - ERROR_CODE_MISSING, - ERROR_CODE_UNKNOWN, -) -from conformity.types import Error __all__ = ( - 'ERROR_CODE_INVALID', - 'ERROR_CODE_MISSING', - 'ERROR_CODE_UNKNOWN', - 'Error', 'KeywordError', 'PositionalError', 'ValidationError', - 'update_error_pointer', ) @@ -38,26 +15,13 @@ class ValidationError(ValueError): class PositionalError(TypeError): """ - Error raised when you pass positional arguments into a validated function that doesn't support them. + Error raised when you pass positional arguments into a validated function + that doesn't support them. """ class KeywordError(TypeError): """ - Error raised when you pass keyword arguments into a validated function that doesn't support them. + Error raised when you pass keyword arguments into a validated function that + doesn't support them. """ - - -# NOTE: update_error_pointer has been deprecated. Use utils.field:update_pointer -# instead. This alias has been added for backwards compatibility, but it -# will be removed in Conformity 2.0. -def update_error_pointer(error, pointer_or_prefix): - # type: (Error, six.text_type) -> Error - warnings.warn( - 'update_error_pointer has been deprecated and will be removed in Conformity 2.0.', - DeprecationWarning, - stacklevel=2, - ) - - from conformity.fields.utils import update_pointer - return cast(Error, update_pointer(error, pointer_or_prefix)) diff --git a/conformity/fields/__init__.py b/conformity/fields/__init__.py index 168f8f5..282d645 100644 --- a/conformity/fields/__init__.py +++ b/conformity/fields/__init__.py @@ -1,8 +1,3 @@ -from __future__ import ( - absolute_import, - unicode_literals, -) - from conformity.fields.basic import ( Anything, Base, diff --git a/conformity/fields/currency.py b/conformity/fields/currency.py deleted file mode 100644 index cbf6533..0000000 --- a/conformity/fields/currency.py +++ /dev/null @@ -1,350 +0,0 @@ -from __future__ import ( - absolute_import, - unicode_literals, -) - -import re -from typing import ( - AbstractSet, - Any as AnyType, - Iterable, - List as ListType, - Optional, - Tuple as TupleType, -) -import warnings - -import attr -import currint -import six - -from conformity.constants import ERROR_CODE_INVALID -from conformity.fields.basic import ( - Base, - Constant, - Integer, - Introspection, - UnicodeString, -) -from conformity.fields.structures import Dictionary -from conformity.fields.utils import strip_none -from conformity.types import Error -from conformity.utils import ( - attr_is_int, - attr_is_iterable, - attr_is_optional, - attr_is_set, - attr_is_string, -) - - -__all__ = ( - 'Amount', - 'AmountRequestDictionary', - 'AmountResponseDictionary', - 'AmountString', -) - - -DEFAULT_CURRENCY_CODES = frozenset(currint.currencies.keys()) - - -def _get_errors_for_currency_amount( - currency_code, # type: six.text_type - value, # type: int - valid_currencies, # type: AbstractSet[six.text_type] - gt, # type: Optional[int] - gte, # type: Optional[int] - lt, # type: Optional[int] - lte, # type: Optional[int] -): - errors = [] - - if currency_code not in valid_currencies: - errors.append(Error('Not a valid currency code', code=ERROR_CODE_INVALID)) - if gt is not None and value <= gt: - errors.append(Error('Value not > {}'.format(gt), code=ERROR_CODE_INVALID)) - if lt is not None and value >= lt: - errors.append(Error('Value not < {}'.format(lt), code=ERROR_CODE_INVALID)) - if gte is not None and value < gte: - errors.append(Error('Value not >= {}'.format(gte), code=ERROR_CODE_INVALID)) - if 
lte is not None and value > lte: - errors.append(Error('Value not <= {}'.format(lte), code=ERROR_CODE_INVALID)) - - return errors - - -@attr.s -class Amount(Base): - """ - Conformity field that ensures that the value is an instance of `currint.Amount` and optionally enforces boundaries - for that amount with the `valid_currencies`, `gt`, `gte`, `lt`, and `lte` arguments. This field requires that - Currint be installed. - """ - - introspect_type = 'currint.Amount' - - valid_currencies = attr.ib( - default=frozenset(), - validator=attr_is_iterable(attr_is_string(), attr_is_set()), - ) # type: AbstractSet[six.text_type] - gt = attr.ib(default=None, validator=attr_is_optional(attr_is_int())) # type: Optional[int] - gte = attr.ib(default=None, validator=attr_is_optional(attr_is_int())) # type: Optional[int] - lt = attr.ib(default=None, validator=attr_is_optional(attr_is_int())) # type: Optional[int] - lte = attr.ib(default=None, validator=attr_is_optional(attr_is_int())) # type: Optional[int] - description = attr.ib(default=None, validator=attr_is_optional(attr_is_string())) # type: Optional[six.text_type] - - def __attrs_post_init__(self): # type: () -> None - if not self.valid_currencies: - self.valid_currencies = DEFAULT_CURRENCY_CODES - - def errors(self, value): # type: (AnyType) -> ListType[Error] - if not isinstance(value, currint.Amount): - return [Error( - 'Not a currint.Amount instance', - code=ERROR_CODE_INVALID, - )] - - return _get_errors_for_currency_amount( - value.currency.code, - value.value, - self.valid_currencies, - self.gt, - self.gte, - self.lt, - self.lte, - ) - - def introspect(self): # type: () -> Introspection - return strip_none({ - 'type': self.introspect_type, - 'description': self.description, - 'valid_currencies': ( - '(all currencies)' if self.valid_currencies is DEFAULT_CURRENCY_CODES else sorted(self.valid_currencies) - ), - 'gt': self.gt, - 'gte': self.gte, - 'lt': self.lt, - 'lte': self.lte, - }) - - -class AmountRequestDictionary(Dictionary): - """ - Conformity field that ensures that the value is a dictionary containing exactly fields `'currency'` and `'value'` - and optionally enforces boundaries for those values with the `valid_currencies`, `gt`, `gte`, `lt`, and `lte` - arguments. This field requires that Currint be installed. No other arguments are supported; `*args` and `**kwargs` - are deprecated and will be removed in Conformity 2.0.0. - """ - - def __init__( - self, - valid_currencies=None, # type: Iterable[six.text_type] - gt=None, # type: Optional[int] - gte=None, # type: Optional[int] - lt=None, # type: Optional[int] - lte=None, # type: Optional[int] - description=None, # type: Optional[six.text_type] - *args, # type: AnyType - **kwargs # type: AnyType - ): - # type: (...) -> None - """ - Construct the field. 
- - :param valid_currencies: An iterable of valid currencies (if not specified, all valid currencies will be used) - :param gt: If specified, the value must be greater than this - :param gte: If specified, the value must be greater than or equal to this - :param lt: If specified, the value must be less than this - :param lte: If specified, the value must be less than or equal to this - :param description: The description for documentation - :param args: Deprecated, unused, and will be removed in version 2.0.0 - :param kwargs: Deprecated, unused, and will be removed in version 2.0.0 - """ - if valid_currencies is not None and ( - not hasattr(valid_currencies, '__iter__') or - not all(isinstance(c, six.text_type) for c in valid_currencies) - ): - raise TypeError("'valid_currencies' must be an iterable of unicode strings") - - if gt is not None and not isinstance(gt, int): - raise TypeError("'gt' must be an int") - if gte is not None and not isinstance(gte, int): - raise TypeError("'gte' must be an int") - if lt is not None and not isinstance(lt, int): - raise TypeError("'lt' must be an int") - if lte is not None and not isinstance(lte, int): - raise TypeError("'lte' must be an int") - - if args or kwargs: - warnings.warn( - '*args and **kwargs are deprecated in AmountRequestDictionary and will be removed in Conformity 2.0.', - DeprecationWarning, - ) - - super(AmountRequestDictionary, self).__init__( - { - 'currency': Constant(*(valid_currencies or DEFAULT_CURRENCY_CODES)), - 'value': Integer(gt=gt, gte=gte, lt=lt, lte=lte), - }, - optional_keys=(), - allow_extra_keys=False, - description=description, - ) - - -class AmountDictionary(AmountRequestDictionary): - """ - :deprecated: - """ - def __init__( - self, - valid_currencies=None, # type: Iterable[six.text_type] - gt=None, # type: Optional[int] - gte=None, # type: Optional[int] - lt=None, # type: Optional[int] - lte=None, # type: Optional[int] - description=None, # type: Optional[six.text_type] - *args, # type: AnyType - **kwargs # type: AnyType - ): - warnings.warn( - 'AmountDictionary is deprecated and will be removed in Conformity 2.0. ' - 'Use AmountRequestDictionary, instead.', - DeprecationWarning, - ) - - # type ignored due to MyPy bug https://github.com/python/mypy/issues/2582 - super(AmountDictionary, self).__init__( # type: ignore - valid_currencies=valid_currencies, - gt=gt, - gte=gte, - lt=lt, - lte=lte, - description=description, - *args, - **kwargs - ) - - -@attr.s -class AmountString(Base): - """ - Conformity field that ensures that the value is a unicode string matching the format CUR,1234 or CUR:1234, where - the part before the delimiter is a valid currency and the part after the delimiter is an integer. It also optionally - enforces boundaries for those values with the `valid_currencies`, `gt`, `gte`, `lt`, and `lte` arguments. This - field requires that Currint be installed. 
- """ - - _format = re.compile(r'[,:]') - - introspect_type = 'currency_amount_string' - - valid_currencies = attr.ib( - default=frozenset(), - validator=attr_is_iterable(attr_is_string(), attr_is_set()), - ) # type: AbstractSet[six.text_type] - gt = attr.ib(default=None, validator=attr_is_optional(attr_is_int())) # type: Optional[int] - gte = attr.ib(default=None, validator=attr_is_optional(attr_is_int())) # type: Optional[int] - lt = attr.ib(default=None, validator=attr_is_optional(attr_is_int())) # type: Optional[int] - lte = attr.ib(default=None, validator=attr_is_optional(attr_is_int())) # type: Optional[int] - description = attr.ib(default=None, validator=attr_is_optional(attr_is_string())) # type: Optional[six.text_type] - - def __attrs_post_init__(self): # type: () -> None - if not self.valid_currencies: - self.valid_currencies = DEFAULT_CURRENCY_CODES - - def errors(self, value): # type: (AnyType) -> ListType[Error] - if not isinstance(value, six.text_type): - return [Error('Not a unicode string currency amount')] - - parts = self._format.split(value) - if len(parts) != 2: - return [Error('Currency string does not match format CUR,1234 or CUR:1234')] - - currency = parts[0] - try: - value = int(parts[1]) - except ValueError: - return [Error('Currency amount {} cannot be converted to an integer'.format(parts[1]))] - - return _get_errors_for_currency_amount( - currency, - value, - self.valid_currencies, - self.gt, - self.gte, - self.lt, - self.lte, - ) - - def introspect(self): # type: () -> Introspection - return strip_none({ - 'type': self.introspect_type, - 'description': self.description, - 'valid_currencies': ( - '(all currencies)' if self.valid_currencies is DEFAULT_CURRENCY_CODES else sorted(self.valid_currencies) - ), - 'gt': self.gt, - 'gte': self.gte, - 'lt': self.lt, - 'lte': self.lte, - }) - - -class AmountResponseDictionary(Dictionary): - """ - Conformity field that ensures that the value is a dictionary containing at least fields `'currency'` and `'value'` - and optionally fields `'major_value'` and `'display'`. This field requires that Currint be installed. - """ - - def __init__(self, description=None, major_value_required=True, display_required=True): - # type: (Optional[six.text_type], bool, bool) -> None - """ - Construct the field. - - :param description: The description for documentation - :param major_value_required: By default, `'major_value'` is a required field in the response, but setting this - to `False` makes it optional - :param display_required: By default, `'display'` is a required field in the response, but setting this to - `False` makes it optional - """ - optional_keys = () # type: TupleType[six.text_type, ...] - if not major_value_required: - optional_keys += ('major_value', ) - if not display_required: - optional_keys += ('display', ) - super(AmountResponseDictionary, self).__init__( - { - 'currency': Constant(*DEFAULT_CURRENCY_CODES), - 'value': Integer(), - 'major_value': UnicodeString(), - 'display': UnicodeString(), - }, - optional_keys=optional_keys, - allow_extra_keys=False, - description=description, - ) - - -class CurrencyCodeField(Constant): - """ - An enum field for restricting values to valid currency codes. Permits only current currencies - and uses currint library. 
- """ - introspect_type = 'currency_code_field' - - def __init__(self, code_filter=lambda x: True, **kwargs): - """ - :param code_filter: If specified, will be called to further filter the available currency codes - :type code_filter: lambda x: bool - """ - - valid_currency_codes = (code for code in DEFAULT_CURRENCY_CODES if code_filter(code)) - super(CurrencyCodeField, self).__init__(*valid_currency_codes, **kwargs) - - def errors(self, value): - if not isinstance(value, six.text_type): - return [Error('Not a unicode string')] - - return super(CurrencyCodeField, self).errors(value) diff --git a/conformity/settings/__init__.py b/conformity/settings/__init__.py index 63feb15..6840f37 100644 --- a/conformity/settings/__init__.py +++ b/conformity/settings/__init__.py @@ -1,8 +1,3 @@ -from __future__ import ( - absolute_import, - unicode_literals, -) - import copy import itertools from typing import ( @@ -21,8 +16,6 @@ cast, ) -import six - from conformity import fields from conformity.error import ValidationError from conformity.validator import validate @@ -45,19 +38,11 @@ from abc import ABCMeta as BaseMeta # type: ignore -if six.PY2: - # TODO Always use KeysView, ValuesView, and ItemsView when Python 3-only - SettingsKeysView = List[six.text_type] - SettingsValuesView = List[Any] - SettingsItemsView = List[Tuple[six.text_type, Any]] -else: - SettingsKeysView = KeysView[six.text_type] # type: ignore - SettingsValuesView = ValuesView[Any] # type: ignore - SettingsItemsView = ItemsView[six.text_type, Any] - - -SettingsSchema = Mapping[six.text_type, fields.Base] -SettingsData = Mapping[six.text_type, Any] +SettingsKeysView = KeysView[str] +SettingsValuesView = ValuesView[Any] +SettingsItemsView = ItemsView[str, Any] +SettingsSchema = Mapping[str, fields.Base] +SettingsData = Mapping[str, Any] # noinspection PyShadowingBuiltins _VT = TypeVar('_VT', bound=Any) @@ -90,8 +75,7 @@ class _SettingsMetaclass(BaseMeta): # print('{}: {}'.format(name, bases)) # return super(Meta, mcs).__new__(mcs, name, bases, body) # - # @six.add_metaclass(Meta) - # class Base(Mapping[six.text_type, Any]): + # class Base(Mapping[str, Any], metaclass=Meta): # pass # # class Foo(Base): @@ -135,11 +119,11 @@ def __new__(mcs, name, bases, body): defaults_not_inherited = not any(cls.defaults is b.defaults for b in applicable_bases) chain_of_schemas = itertools.chain( itertools.chain(*(b.schema.items() for b in reversed(applicable_bases))), - cast(Iterable[Tuple[six.text_type, fields.Base]], cls.schema.items() if schema_not_inherited else ()), + cast(Iterable[Tuple[str, fields.Base]], cls.schema.items() if schema_not_inherited else ()), ) chain_of_defaults = itertools.chain( itertools.chain(*(b.defaults.items() for b in reversed(applicable_bases))), - cast(Iterable[Tuple[six.text_type, Any]], cls.defaults.items() if defaults_not_inherited else ()), + cast(Iterable[Tuple[str, Any]], cls.defaults.items() if defaults_not_inherited else ()), ) # Now we define the schema and defaults for this class to be the merged schemas and defaults from above. @@ -149,8 +133,7 @@ def __new__(mcs, name, bases, body): return cls -@six.add_metaclass(_SettingsMetaclass) -class Settings(Mapping[six.text_type, Any]): +class Settings(Mapping[str, Any], metaclass=_SettingsMetaclass): """ Represents settings schemas and defaults that can be inherited and merged across the inheritance hierarchy. 
@@ -229,7 +212,7 @@ class ImproperlyConfigured(Exception): Raised when validation fails for the configuration data (contents) passed into the constructor or `set(data)`. """ - def __init__(self, data): # type: (SettingsData) -> None + def __init__(self, data: SettingsData) -> None: """ Instantiate a new Settings object and validate its contents. @@ -241,7 +224,7 @@ def __init__(self, data): # type: (SettingsData) -> None self._data = {} # type: SettingsData self.set(data) - def set(self, data): # type: (SettingsData) -> None + def set(self, data: SettingsData) -> None: """ Initialize and validate the configuration data (contents) for this settings object. @@ -276,8 +259,8 @@ def set(self, data): # type: (SettingsData) -> None self._data = settings @classmethod - def _merge_mappings(cls, data, defaults): # type: (SettingsData, SettingsData) -> SettingsData - new_data = {} # type: Dict[six.text_type, Any] + def _merge_mappings(cls, data: SettingsData, defaults: SettingsData) -> SettingsData: + new_data = {} # type: Dict[str, Any] for key in set(itertools.chain(data.keys(), defaults.keys())): if key in data and key in defaults: @@ -292,7 +275,7 @@ def _merge_mappings(cls, data, defaults): # type: (SettingsData, SettingsData) return new_data - def keys(self): # type: () -> SettingsKeysView + def keys(self) -> SettingsKeysView: """ Returns a `KeysView` of the settings data (even in Python 2). @@ -300,7 +283,7 @@ def keys(self): # type: () -> SettingsKeysView """ return cast(SettingsKeysView, self._data.keys()) - def values(self): # type: () -> SettingsValuesView + def values(self) -> SettingsValuesView: """ Returns a `ValuesView` of the settings data (even in Python 2). @@ -308,7 +291,7 @@ def values(self): # type: () -> SettingsValuesView """ return self._data.values() - def items(self): # type: () -> SettingsItemsView + def items(self) -> SettingsItemsView: """ Returns an `ItemsView` of the settings data (even in Python 2). @@ -316,7 +299,7 @@ def items(self): # type: () -> SettingsItemsView """ return cast(SettingsItemsView, self._data.items()) - def get(self, key, default=None): # type: (six.text_type, Optional[_VT]) -> Optional[_VT] + def get( self, key: str, default: Optional[_VT]=None) -> Optional[_VT]: """ Returns the value associated with the given key, or the default if specified as an argument, or `None` if no default is specified. @@ -328,7 +311,7 @@ def get(self, key, default=None): # type: (six.text_type, Optional[_VT]) -> Opt """ return self._data.get(key, default) - def __getitem__(self, key): # type: (six.text_type) -> Any + def __getitem__(self, key:str) -> Any: """ Returns the value associated with the given key, or raises a `KeyError` if it does not exist. @@ -340,7 +323,7 @@ def __getitem__(self, key): # type: (six.text_type) -> Any """ return self._data[key] - def __len__(self): # type: () -> int + def __len__(self) -> int: """ Returns the number of keys in the root of this settings data. @@ -348,7 +331,7 @@ def __len__(self): # type: () -> int """ return len(self._data) - def __iter__(self): # type: () -> Iterator[six.text_type] + def __iter__(self) -> Iterator[str]: """ Returns an iterator over the keys of this settings data. @@ -356,7 +339,7 @@ def __iter__(self): # type: () -> Iterator[six.text_type] """ return iter(self._data) - def __contains__(self, key): # type: (Any) -> bool + def __contains__(self, key: str) -> bool: """ Indicates whether the specific key exists in this settings data. 
@@ -366,7 +349,7 @@ def __contains__(self, key): # type: (Any) -> bool """ return key in self._data - def __eq__(self, other): # type: (Any) -> bool + def __eq__(self, other: Any) -> bool: """ Indicates whether the other object provided is an instance of the same Settings subclass as this Settings subclass and its settings data matches this settings data. @@ -377,7 +360,7 @@ def __eq__(self, other): # type: (Any) -> bool """ return isinstance(other, self.__class__) and self._data == other._data - def __ne__(self, other): # type: (Any) -> bool + def __ne__(self, other: Any) -> bool: """ Indicates the reverse of __eq__. diff --git a/conformity/types.py b/conformity/types.py index 229cff9..6e560f2 100644 --- a/conformity/types.py +++ b/conformity/types.py @@ -1,20 +1,8 @@ -from __future__ import ( - absolute_import, - unicode_literals, -) - from typing import ( List, Optional, ) -import attr -import six - -from conformity.utils import ( - attr_is_optional, - attr_is_string, -) from conformity.constants import ( ERROR_CODE_INVALID, WARNING_CODE_WARNING, @@ -28,35 +16,55 @@ ) -@attr.s -class Issue(object): +class Issue: """ Represents an issue found during validation of a value. """ - message = attr.ib(validator=attr_is_string()) # type: six.text_type - pointer = attr.ib(default=None, validator=attr_is_optional(attr_is_string())) # type: Optional[six.text_type] + def __init__(self, message: str, pointer: Optional[str]=None) -> None: + self.message = message + self.pointer = pointer -@attr.s class Error(Issue): """ Represents an error found during validation of a value. """ - code = attr.ib(default=ERROR_CODE_INVALID, validator=attr_is_string()) # type: six.text_type + def __init__( + self, + message: str, + pointer: Optional[str]=None, + code: Optional[str]=None, + ): + super().__init__(message, pointer) + self.code = code or ERROR_CODE_INVALID -@attr.s class Warning(Issue): """ Represents a warning found during validation of a value.
""" - code = attr.ib(default=WARNING_CODE_WARNING, validator=attr_is_string()) # type: six.text_type + def __init__( + self, + message: str, + pointer: Optional[str]=None, + code: Optional[str]=None, + ): + super().__init__(message, pointer) + self.code = code or WARNING_CODE_WARNING -@attr.s class Validation(object): - errors = attr.ib(factory=list) # type: List[Error] - warnings = attr.ib(factory=list) # type: List[Warning] + def __init__( + self, + *, + errors: Optional[List[Error]]=None, + warnings: Optional[List[Error]]=None, + ): + self.errors = errors or [] + self.warnings = warnings or [] def __bool__(self): + return self.is_valid() + + def is_valid(self): return bool(self.errors) diff --git a/conformity/utils.py b/conformity/utils.py deleted file mode 100644 index 9751fcc..0000000 --- a/conformity/utils.py +++ /dev/null @@ -1,141 +0,0 @@ -from __future__ import ( - absolute_import, - unicode_literals, -) - -import decimal -from typing import ( - Any as AnyType, - Callable, - Optional, - Tuple as TupleType, - Type, - Union, -) - -import attr -import six - - -AttrsValidator = Callable[[AnyType, AnyType, AnyType], None] - - -attr_is_instance = ( - attr.validators.instance_of # type: ignore -) # type: Callable[[Union[Type, TupleType[Type, ...]]], AttrsValidator] -attr_is_optional = attr.validators.optional # type: Callable[[AttrsValidator], AttrsValidator] - - -def attr_is_bool(): # type: () -> AttrsValidator - """Creates an Attrs validator that ensures the argument is a bool.""" - return attr_is_instance(bool) - - -def attr_is_int(): # type: () -> AttrsValidator - """Creates an Attrs validator that ensures the argument is an integer.""" - return attr_is_instance(int) - - -def attr_is_number(): # type: () -> AttrsValidator - """Creates an Attrs validator that ensures the argument is a number.""" - return attr_is_instance((int, float, decimal.Decimal)) - - -def attr_is_set(): # type: () -> AttrsValidator - """Creates an Attrs validator that ensures the argument is an abstract set.""" - return attr_is_instance((set, frozenset)) - - -def attr_is_string(): # type: () -> AttrsValidator - """Creates an Attrs validator that ensures the argument is a unicode string.""" - return attr_is_instance(six.text_type) - - -# In Attrs 19.1.0 we can use attr.validators.deep_iterable, but we want to support older versions for a while longer, -# so we use this custom validator for now -def attr_is_iterable( - member_validator, # type: AttrsValidator - iterable_validator=None # type: Optional[AttrsValidator] -): - # type: (...) -> AttrsValidator - """ - The equivalent of `attr.validators.deep_iterable` added in Attrs 19.1.0, but we still support older versions. - """ - - # noinspection PyShadowingNames - def validator(inst, attr, value): - if not hasattr(value, '__iter__'): - raise TypeError( - "'{name}' must be iterable (got {value!r} that is a {actual!r}).".format( - name=attr.name, - actual=value.__class__, - value=value, - ), - attr, - value, - ) - - if iterable_validator: - iterable_validator(inst, attr, value) - - class A(object): - def __init__(self, num): - self.num = num - - @property - def name(self): - return '{}.{}'.format(attr.name, self.num) - - for i, item in enumerate(value): - member_validator(inst, A(i), item) - - return validator - - -def attr_is_instance_or_instance_tuple( - check_type, # type: Union[Type, TupleType[Type, ...]] -): - # type: (...) -> AttrsValidator - """ - Creates an Attrs validator that ensures the argument is a instance of or tuple of instances of the given type. 
- """ - - # first, some meta META validation - if not isinstance(check_type, type): - if not isinstance(check_type, tuple): - raise TypeError("'check_type' must be a type or tuple of types") - for i, t in enumerate(check_type): - if not isinstance(t, type): - raise TypeError("'check_type[{i}] must be a type or tuple of types".format(i=i)) - - def validator(_instance, attribute, value): - if isinstance(value, check_type): - return - - if not isinstance(value, tuple): - raise TypeError( - "'{name}' must be a {t!r} or a tuple of {t!r} (got {value!r} that is a {actual!r}).".format( - name=attribute.name, - actual=type(value), - value=value, - t=check_type, - ), - attribute, - value, - ) - - for i, item in enumerate(value): - if not isinstance(item, check_type): - raise TypeError( - "'{name}[{i}]' must be a {t!r} (got {value!r} that is a {actual!r}).".format( - i=i, - name=attribute.name, - actual=type(item), - value=item, - t=check_type, - ), - attribute, - item, - ) - - return validator diff --git a/conformity/version.py b/conformity/version.py index c178837..97e071a 100644 --- a/conformity/version.py +++ b/conformity/version.py @@ -1,8 +1,2 @@ -from __future__ import ( - absolute_import, - unicode_literals, -) - - __version_info__ = (1, 27, 3) __version__ = '-'.join(filter(None, ['.'.join(map(str, __version_info__[:3])), (__version_info__[3:] or [None])[0]])) From 7b5e37eeed3ec099011ee6e5fa3196b017ee01fc Mon Sep 17 00:00:00 2001 From: Seth Elliott Date: Wed, 29 Jul 2020 16:17:00 -0500 Subject: [PATCH 03/10] Add new BaseField and shim for legacy Base field --- conformity/fields/base.py | 78 +++++++++++++++++++++++++++++++++++++ conformity/fields/legacy.py | 39 +++++++++++++++++++ 2 files changed, 117 insertions(+) create mode 100644 conformity/fields/base.py create mode 100644 conformity/fields/legacy.py diff --git a/conformity/fields/base.py b/conformity/fields/base.py new file mode 100644 index 0000000..1ace07b --- /dev/null +++ b/conformity/fields/base.py @@ -0,0 +1,78 @@ +from abc import ( + ABCMeta, + abstractmethod, +) +from typing import ( + Any, + Dict, + List, + Optional, + Tuple, + Type, + Union, +) + +from conformity.types import ( + Error, + Warning, + Validation, +) + +__all__ = ( + 'BaseField', +) + + +# TODO: Not strict enough. Make this JSON serializable type. +# NOTE: removed datetime and decimal types +Introspection = Dict[ + str, + Union[ + int, float, bool, str, None, + List[Any], + Dict[Any, Any], + ], +] + + +class _BaseMeta(ABCMeta): + def __init__(self, name, bases, attrs): + if 'python_type' not in attrs: + raise ValueError( + 'All concrete BaseField subclasses must ' + 'specify a python_type class attribute!' + ) + super().__init__(name, bases, attrs) + + +class BaseField(metaclass=_BaseMeta): + """ + The abstract base class from which all other Conformity fields inherit. It + defines the common `validate()` and `introspect()` interfaces that must be + implemented by BaseField subclasses. + """ + + def __init__(self, description: Optional[str]=None) -> None: + self.description = description + + def errors(self, value: Any) -> List[Error]: + return self.validate(value).errors + + def warnings(self, value: Any) -> List[Warning]: + return self.validate(value).warnings + + @abstractmethod + def validate(self, value: Any) -> Validation: + """ + Interface for field validation. + + Returns a Validation instance containing errors (if any) and, + optionally, a list of warnings and extracted values. 
+ """ + + @abstractmethod + def introspect(self) -> Introspection: + """ + Returns a JSON-serializable dictionary containing introspection data + that can be used to document the schema. + """ diff --git a/conformity/fields/legacy.py b/conformity/fields/legacy.py new file mode 100644 index 0000000..bb52a0b --- /dev/null +++ b/conformity/fields/legacy.py @@ -0,0 +1,39 @@ +from abc import abstractmethod + +from typing import ( + Any, + List, +) + +from conformity.types import ( + Error, + Warning, + Validation, +) +from conformity.fields.base import BaseField + + +class Base(BaseField): + """ + The legacy (Conformity 1.x) base field from which all other legacy fields + inherit. This defines a simple interface for getting a list of validation + errors and recursively introspecting the schema. + """ + @abstractmethod + def errors(self, value: Any) -> List[Error]: + """ + Returns a list of errors with the value. An empty return means that it's + valid. + """ + + def warnings(self, value: Any) -> List[Warning]: + """ + Returns a list of warnings for the field or value. + """ + return [] + + def validate(self, value: Any) -> Validation: + return Validation( + errors=self.errors(value), + warnings=self.warnings(value), + ) From b6543edeedf1c65181b43e38bc77e5f4f2de1e68 Mon Sep 17 00:00:00 2001 From: Seth Elliott Date: Mon, 3 Aug 2020 16:17:36 -0500 Subject: [PATCH 04/10] Update builtins, email, networking, and temporal fields --- conformity/fields/base.py | 53 +++-- conformity/fields/basic.py | 365 ---------------------------------- conformity/fields/builtin.py | 196 ++++++++++++++++++ conformity/fields/email.py | 124 ++++++------ conformity/fields/net.py | 171 ++++++++-------- conformity/fields/temporal.py | 196 ++++++++---------- conformity/fields/utils.py | 11 +- 7 files changed, 455 insertions(+), 661 deletions(-) delete mode 100644 conformity/fields/basic.py create mode 100644 conformity/fields/builtin.py diff --git a/conformity/fields/base.py b/conformity/fields/base.py index 1ace07b..9bd401b 100644 --- a/conformity/fields/base.py +++ b/conformity/fields/base.py @@ -4,12 +4,8 @@ ) from typing import ( Any, - Dict, List, Optional, - Tuple, - Type, - Union, ) from conformity.types import ( @@ -17,31 +13,39 @@ Warning, Validation, ) +from conformity.typing import Introspection __all__ = ( 'BaseField', ) -# TODO: Not strict enough. Make this JSON serializable type. -# NOTE: removed datetime and decimal types -Introspection = Dict[ - str, - Union[ - int, float, bool, str, None, - List[Any], - Dict[Any, Any], - ], -] - - class _BaseMeta(ABCMeta): def __init__(self, name, bases, attrs): - if 'python_type' not in attrs: + # Validate field definition + try: + valid_type = attrs['valid_type'] + except KeyError: raise ValueError( 'All concrete BaseField subclasses must ' - 'specify a python_type class attribute!' + 'specify a valid_type class attribute!' ) + if 'valid_noun' not in attrs: + # Naively set the type "noun" from the type name + attrs['valid_noun'] = 'a {}'.format(valid_type.__name__) + raise ValueError( + 'All concrete BaseField subclasses must ' + 'specify a valid_noun class attribute!' 
+ ) + if 'introspect_type' not in attrs: + if isinstance(valid_type, tuple): + raise ValueError( + 'introspect_type must be defined for field {} ' + 'when valid_type is a tuple' + ) + # If unset, infer the introspection type from the type name + attrs['introspect_type'] = valid_type.__name__ + super().__init__(name, bases, attrs) @@ -52,7 +56,7 @@ class BaseField(metaclass=_BaseMeta): implemented by BaseField subclasses. """ - def __init__(self, description: Optional[str]=None) -> None: + def __init__(self, *, description: str=None) -> None: self.description = description def errors(self, value: Any) -> List[Error]: @@ -61,7 +65,6 @@ def errors(self, value: Any) -> List[Error]: def warnings(self, value: Any) -> List[Warning]: return self.validate(value).warnings - @abstractmethod def validate(self, value: Any) -> Validation: """ Interface for field validation. @@ -69,10 +72,18 @@ def validate(self, value: Any) -> Validation: Returns a Validation instance containing errors (if any) and, optionally, a list of warnings and extracted values. """ + errors = [] + if not isinstance(value, self.valid_type): + errors.append(Error('Value is not {}'.format(self.valid_noun))) + return Validation(errors=errors) + - @abstractmethod def introspect(self) -> Introspection: """ Returns a JSON-serializable dictionary containing introspection data that can be used to document the schema. """ + return strip_none({ + 'introspect_type': self.introspect_type, + 'description': self.description, + }) diff --git a/conformity/fields/basic.py b/conformity/fields/basic.py deleted file mode 100644 index c77a365..0000000 --- a/conformity/fields/basic.py +++ /dev/null @@ -1,365 +0,0 @@ -from __future__ import ( - absolute_import, - unicode_literals, -) - -import datetime -import decimal -from typing import ( - Any as AnyType, - Dict, - List as ListType, - Optional, - Tuple as TupleType, - Type, - Union, -) - -import attr -import six - -from conformity.constants import ERROR_CODE_UNKNOWN -from conformity.fields.utils import strip_none -from conformity.types import ( - Error, - Warning, - Validation, -) -from conformity.utils import ( - AttrsValidator, - attr_is_bool, - attr_is_instance, - attr_is_int, - attr_is_number, - attr_is_optional, - attr_is_string, -) - - -# Blah ... until recursive types are supported, this is not strict enough: https://github.com/python/mypy/issues/731 -Introspection = Dict[ - six.text_type, - Union[ - int, float, bool, six.text_type, decimal.Decimal, datetime.datetime, datetime.date, datetime.time, None, - ListType[AnyType], - Dict[AnyType, AnyType], - ], -] - - -@attr.s -class Base(object): - """ - The base Conformity field from which all Conformity fields must inherit, this defines a simple interface for - getting a list of validation errors and recursively introspecting the schema. All fields should accept a - `description` argument for use in documentation and introspection. - """ - def errors(self, value): # type: (AnyType) -> ListType[Error] - """ - Returns a list of errors with the value. An empty return means that it's valid. - """ - return [Error('Validation not implemented on base type')] - - def warnings(self, value): # type: (AnyType) -> ListType[Warning] - """ - Returns a list of warnings for the field or value. 
- """ - return [] - - def validate(self, value): - # type: (AnyType) -> Validation - return Validation( - errors=self.errors(value), - warnings=self.warnings(value), - ) - - def introspect(self): # type: () -> Introspection - """ - Returns a JSON-serializable dictionary containing introspection data that can be used to document the schema. - """ - raise NotImplementedError('You must override introspect() in a subclass') - - -def attr_is_conformity_field(): # type: () -> AttrsValidator - """Creates an Attrs validator that ensures the argument is a Conformity field (extends `Base`).""" - return attr_is_instance(Base) - - -class Constant(Base): - """ - Conformity field that ensures that the value exactly matches the constant value supplied or, if multiple constant - values are supplied, exactly matches one of those values. - """ - - introspect_type = 'constant' - - def __init__(self, *args, **kwargs): # type: (*AnyType, **AnyType) -> None - self.values = frozenset(args) - if not self.values: - raise ValueError('You must provide at least one constant value') - self.description = kwargs.pop(str('description'), None) # type: Optional[six.text_type] - if self.description and not isinstance(self.description, six.text_type): - raise TypeError("'description' must be a unicode string") - # Check they didn't pass any other kwargs - if kwargs: - raise TypeError('Invalid keyword arguments for Constant: {}'.format(kwargs.keys())) - - def _repr(cv): - return '"{}"'.format(cv) if isinstance(cv, six.string_types) else '{}'.format(cv) - - if len(self.values) == 1: - self._error_message = 'Value is not {}'.format(_repr(tuple(self.values)[0])) - else: - self._error_message = 'Value is not one of: {}'.format(', '.join(sorted(_repr(v) for v in self.values))) - - def errors(self, value): # type: (AnyType) -> ListType[Error] - try: - is_valid = value in self.values - except TypeError: - # Unhashable values can't be used for membership checks. - is_valid = False - - if not is_valid: - return [Error(self._error_message, code=ERROR_CODE_UNKNOWN)] - return [] - - def introspect(self): # type: () -> Introspection - return strip_none({ - 'type': self.introspect_type, - 'values': [ - s if isinstance(s, (six.text_type, bool, int, float, type(None))) else six.text_type(s) - for s in sorted(self.values, key=six.text_type) - ], - 'description': self.description, - }) - - -@attr.s -class Anything(Base): - """ - Conformity field that allows the value to be literally anything. - """ - - introspect_type = 'anything' - - description = attr.ib(default=None, validator=attr_is_optional(attr_is_string())) # type: Optional[six.text_type] - - def errors(self, value): # type: (AnyType) -> ListType[Error] - return [] - - def introspect(self): # type: () -> Introspection - return strip_none({ - 'type': self.introspect_type, - 'description': self.description, - }) - - -@attr.s -class Hashable(Anything): - """ - Conformity field that ensures that the value is hashable (`hash(...)` can be called on the value without error). - """ - - introspect_type = 'hashable' - - def errors(self, value): # type: (AnyType) -> ListType[Error] - try: - hash(value) - except TypeError: - return [ - Error('Value is not hashable'), - ] - return [] - - def introspect(self): # type: () -> Introspection - return strip_none({ - 'type': self.introspect_type, - 'description': self.description, - }) - - -@attr.s -class Boolean(Base): - """ - Conformity field that ensures that the value is a boolean. 
- """ - - introspect_type = 'boolean' - - description = attr.ib(default=None, validator=attr_is_optional(attr_is_string())) # type: Optional[six.text_type] - - def errors(self, value): # type: (AnyType) -> ListType[Error] - if not isinstance(value, bool): - return [ - Error('Not a boolean'), - ] - return [] - - def introspect(self): # type: () -> Introspection - return strip_none({ - 'type': self.introspect_type, - 'description': self.description, - }) - - -@attr.s -class Integer(Base): - """ - Conformity field that ensures that the value is an integer and optionally enforces boundaries for that integer with - the `gt`, `gte`, `lt`, and `lte` arguments. - """ - - valid_type = six.integer_types # type: Union[Type, TupleType[Type, ...]] - valid_noun = 'an integer' # type: six.text_type - introspect_type = 'integer' # type: six.text_type - - gt = attr.ib( - default=None, - validator=attr_is_optional(attr_is_number()), - ) # type: Optional[Union[int, float, decimal.Decimal]] - gte = attr.ib( - default=None, - validator=attr_is_optional(attr_is_number()), - ) # type: Optional[Union[int, float, decimal.Decimal]] - lt = attr.ib( - default=None, - validator=attr_is_optional(attr_is_number()), - ) # type: Optional[Union[int, float, decimal.Decimal]] - lte = attr.ib( - default=None, - validator=attr_is_optional(attr_is_number()), - ) # type: Optional[Union[int, float, decimal.Decimal]] - description = attr.ib(default=None, validator=attr_is_optional(attr_is_string())) # type: Optional[six.text_type] - - def errors(self, value): # type: (AnyType) -> ListType[Error] - if not isinstance(value, self.valid_type) or isinstance(value, bool): - return [Error('Not {}'.format(self.valid_noun))] - - errors = [] - if self.gt is not None and value <= self.gt: - errors.append(Error('Value not > {}'.format(self.gt))) - if self.lt is not None and value >= self.lt: - errors.append(Error('Value not < {}'.format(self.lt))) - if self.gte is not None and value < self.gte: - errors.append(Error('Value not >= {}'.format(self.gte))) - if self.lte is not None and value > self.lte: - errors.append(Error('Value not <= {}'.format(self.lte))) - return errors - - def introspect(self): # type: () -> Introspection - return strip_none({ - 'type': self.introspect_type, - 'description': self.description, - 'gt': self.gt, - 'gte': self.gte, - 'lt': self.lt, - 'lte': self.lte, - }) - - -@attr.s -class Float(Integer): - """ - Conformity field that ensures that the value is a float and optionally enforces boundaries for that float with - the `gt`, `gte`, `lt`, and `lte` arguments. - """ - - valid_type = six.integer_types + (float,) # type: ignore # see https://github.com/python/mypy/issues/224 - valid_noun = 'a float' - introspect_type = 'float' - - -@attr.s -class Decimal(Integer): - """ - Conformity field that ensures that the value is a `decimal.Decimal` and optionally enforces boundaries for that - decimal with the `gt`, `gte`, `lt`, and `lte` arguments. - """ - - valid_type = decimal.Decimal - valid_noun = 'a decimal' - introspect_type = 'decimal' - - -@attr.s -class UnicodeString(Base): - """ - Conformity field that ensures that the value is a unicode string (`str` in Python 3, `unicode` in Python 2) and - optionally enforces minimum and maximum lengths with the `min_length`, `max_length`, and `allow_blank` arguments. 
- """ - - valid_type = six.text_type # type: Type - valid_noun = 'unicode string' - introspect_type = 'unicode' - - min_length = attr.ib(default=None, validator=attr_is_optional(attr_is_int())) # type: Optional[int] - max_length = attr.ib(default=None, validator=attr_is_optional(attr_is_int())) # type: Optional[int] - description = attr.ib(default=None, validator=attr_is_optional(attr_is_string())) # type: Optional[six.text_type] - allow_blank = attr.ib(default=True, validator=attr_is_bool()) # type: bool - - def __attrs_post_init__(self): # type: () -> None - if self.min_length is not None and self.max_length is not None and self.min_length > self.max_length: - raise ValueError('min_length cannot be greater than max_length in UnicodeString') - - def errors(self, value): # type: (AnyType) -> ListType[Error] - if not isinstance(value, self.valid_type): - return [Error('Not a {}'.format(self.valid_noun))] - elif self.min_length is not None and len(value) < self.min_length: - return [Error('String must have a length of at least {}'.format(self.min_length))] - elif self.max_length is not None and len(value) > self.max_length: - return [Error('String must have a length no more than {}'.format(self.max_length))] - elif not (self.allow_blank or value.strip()): - return [Error('String cannot be blank')] - return [] - - def introspect(self): # type: () -> Introspection - return strip_none({ - 'type': self.introspect_type, - 'description': self.description, - 'min_length': self.min_length, - 'max_length': self.max_length, - 'allow_blank': self.allow_blank and None, # if the default True, hide it from introspection - }) - - -@attr.s -class ByteString(UnicodeString): - """ - Conformity field that ensures that the value is a byte string (`bytes` in Python 3, `str` in Python 2) and - optionally enforces minimum and maximum lengths with the `min_length`, `max_length`, and `allow_blank` arguments. - """ - - valid_type = six.binary_type - valid_noun = 'byte string' - introspect_type = 'bytes' - - -@attr.s -class UnicodeDecimal(Base): - """ - Conformity field that ensures that the value is a unicode string that is also a valid decimal and can successfully - be converted to a `decimal.Decimal`. 
- """ - - introspect_type = 'unicode_decimal' - - description = attr.ib(default=None, validator=attr_is_optional(attr_is_string())) # type: Optional[six.text_type] - - def errors(self, value): # type: (AnyType) -> ListType[Error] - if not isinstance(value, six.text_type): - return [ - Error('Invalid decimal value (not unicode string)'), - ] - try: - decimal.Decimal(value) - except decimal.InvalidOperation: - return [ - Error('Invalid decimal value (parse error)'), - ] - return [] - - def introspect(self): # type: () -> Introspection - return strip_none({ - 'type': self.introspect_type, - 'description': self.description, - }) diff --git a/conformity/fields/builtin.py b/conformity/fields/builtin.py new file mode 100644 index 0000000..0272c37 --- /dev/null +++ b/conformity/fields/builtin.py @@ -0,0 +1,196 @@ +import datetime +import decimal +from typing import ( + Any as AnyType, + Dict, + List as ListType, + Optional, + Tuple as TupleType, + Type, + Union, +) + +import attr +import six + +from conformity.constants import ERROR_CODE_UNKNOWN +from conformity.fields.utils import strip_none +from conformity.fields.base import BaseField +from conformity.types import ( + Error, + Warning, + Validation, +) +from conformity.typing import Introspection + + +# TODO: update +class Constant(BaseField): + """ + Conformity field that ensures that the value exactly matches the constant + value supplied or, if multiple constant values are supplied, exactly matches + one of those values. + """ + + introspect_type = 'constant' + + def __init__(self, *args, **kwargs): # type: (*AnyType, **AnyType) -> None + self.values = frozenset(args) + if not self.values: + raise ValueError('You must provide at least one constant value') + self.description = kwargs.pop(str('description'), None) # type: Optional[six.text_type] + if self.description and not isinstance(self.description, six.text_type): + raise TypeError("'description' must be a unicode string") + # Check they didn't pass any other kwargs + if kwargs: + raise TypeError('Invalid keyword arguments for Constant: {}'.format(kwargs.keys())) + + def _repr(cv): + return '"{}"'.format(cv) if isinstance(cv, six.string_types) else '{}'.format(cv) + + if len(self.values) == 1: + self._error_message = 'Value is not {}'.format(_repr(tuple(self.values)[0])) + else: + self._error_message = 'Value is not one of: {}'.format(', '.join(sorted(_repr(v) for v in self.values))) + + def errors(self, value): # type: (AnyType) -> ListType[Error] + try: + is_valid = value in self.values + except TypeError: + # Unhashable values can't be used for membership checks. + is_valid = False + + if not is_valid: + return [Error(self._error_message, code=ERROR_CODE_UNKNOWN)] + return [] + + def introspect(self) -> Introspection: + return strip_none({ + 'type': self.introspect_type, + 'values': [ + s if isinstance(s, (six.text_type, bool, int, float, type(None))) else six.text_type(s) + for s in sorted(self.values, key=six.text_type) + ], + 'description': self.description, + }) + + +class Anything(BaseField): + """ + Validates that the value can be anything. + """ + + introspect_type = 'anything' + + def validate(self, value: AnyType) -> Validation: + return Validation() + + +class Boolean(BaseField): + """ + Validates that the value is a boolean. 
+ """ + + valid_type = bool + valid_noun = 'a boolean' + introspect_type = 'boolean' + + +class Integer(Number): + """ + Validates that the value is an integer + """ + + valid_type = int + valid_noun = 'an integer' + introspect_type = 'integer' + + +class Float(Number): + """ + Validates that the value is a float + """ + + valid_type = float + + +class Decimal(Number): + """ + Conformity field that ensures that the value is a `decimal.Decimal` and optionally enforces boundaries for that + decimal with the `gt`, `gte`, `lt`, and `lte` arguments. + """ + + valid_type = decimal.Decimal + + +class String(Sized): + """ + Validates that the value is a string. Optionally validates that the string + is not blank. + """ + valid_type = str + introspect_type = 'string' + + def __init__(self, *, allow_blank: bool=True, **kwargs): + super().__init__(**kwargs) + self.allow_blank = allow_blank + + def validate(self, value: AnyType) -> Validation: + v = super().validate(value) + if v.is_valid(): + # TODO: implement "should continue" instead of using is_valid() + # here and elsewhere. + if not (self.allow_blank or value.strip()): + v.errors.append(Error('Value cannot be blank')) + return v + + def introspect(self) -> Introspection: + return strip_none({ + 'allow_blank': self.allow_blank, + }).update(super().introspect()) + + +class Bytes(Sized): + """ + Validate that the value is a byte string + """ + + valid_type = bytes + valid_noun = 'a byte string' + valid_noun = 'byte string' + + +# Deprecated Conformity 1.x aliases +UnicodeString = String +ByteString = Bytes + + +@attr.s +class UnicodeDecimal(Base): + """ + Conformity field that ensures that the value is a unicode string that is also a valid decimal and can successfully + be converted to a `decimal.Decimal`. + """ + + introspect_type = 'unicode_decimal' + + description = attr.ib(default=None, validator=attr_is_optional(attr_is_string())) # type: Optional[six.text_type] + + def errors(self, value): # type: (AnyType) -> ListType[Error] + if not isinstance(value, six.text_type): + return [ + Error('Invalid decimal value (not unicode string)'), + ] + try: + decimal.Decimal(value) + except decimal.InvalidOperation: + return [ + Error('Invalid decimal value (parse error)'), + ] + return [] + + def introspect(self): # type: () -> Introspection + return strip_none({ + 'type': self.introspect_type, + 'description': self.description, + }) diff --git a/conformity/fields/email.py b/conformity/fields/email.py index 6a73ce8..777e413 100644 --- a/conformity/fields/email.py +++ b/conformity/fields/email.py @@ -1,41 +1,32 @@ -from __future__ import ( - absolute_import, - unicode_literals, -) - import re from typing import ( - Any as AnyType, + Any, Iterable, - List as ListType, ) -import warnings -import six - -from conformity.fields.basic import ( - Introspection, - UnicodeString, -) +from conformity.fields.builtin import String from conformity.fields.utils import strip_none from conformity.fields.net import IPAddress from conformity.types import Error +from conformity.typing import Introspection + +__all__ = ( + 'EmailAddress', +) class EmailAddress(UnicodeString): """ - Conformity field that ensures that the value is a unicode string that is a valid email address according to - RFC 2822 and optionally accepts non-compliant fields listed in the `whitelist` argument. Substantially copied from - Django (v2.0.x): https://github.com/django/django/blob/stable/2.0.x/django/core/validators.py#L164. 
+ Validates that the value is a string that is a valid email address according + to RFC 2822 and optionally accepts non-compliant fields listed in the + `whitelist` argument. Substantially copied from Django (v2.0.x): + https://github.com/django/django/blob/stable/2.0.x/django/core/validators.py#L164 """ + valid_noun = 'an email address' introspect_type = 'email_address' ip_schema = IPAddress() - # unused, will be removed in version 2.0.0 - message = None # type: ignore - code = None # type: ignore - user_regex = re.compile( r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*\Z" # dot-atom r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])*"\Z)', # quoted-string @@ -53,55 +44,61 @@ class EmailAddress(UnicodeString): ) domain_whitelist = frozenset({'localhost'}) - def __init__(self, message=None, code=None, whitelist=None, **kwargs): - # type: (None, None, Iterable[six.text_type], **AnyType) -> None + def __init__(self, *, whitelist: Iterable[str]=None, **kwargs) -> None: """ Construct a new email address field. - :param message: Deprecated, unused, and will be removed in version 2.0.0 - :param code: Deprecated, unused, and will be removed in version 2.0.0 - :param whitelist: If specified, an invalid domain part will be permitted if it is in this list + :param whitelist: If specified, an invalid domain part will be permitted + if it is in this list """ - if whitelist is not None and ( - not hasattr(whitelist, '__iter__') or - not all(isinstance(c, six.text_type) for c in whitelist) - ): - raise TypeError("'whitelist' must be an iterable of unicode strings") + kwargs['allow_blank'] = False + super().__init__(**kwargs) - if message is not None or code is not None: - warnings.warn( - 'Arguments `message` and `code` are deprecated in EmailAddress and will be removed in Conformity 2.0.', - DeprecationWarning, + if whitelist is not None: + if ( + not isinstance(whitelist, Iterable) or + not all(isinstance(c, str) for c in whitelist) + ): + raise TypeError("'whitelist' must be an iterable of strings") + self.domain_whitelist = ( + whitelist if isinstance(whitelist, frozenset) + else frozenset(whitelist) ) - super(EmailAddress, self).__init__(**kwargs) - if whitelist is not None: - self.domain_whitelist = whitelist if isinstance(whitelist, frozenset) else frozenset(whitelist) + def validate(self, value: Any) -> Validation: + v = super().validate(value) + if not v.is_valid(): + return v - def errors(self, value): # type: (AnyType) -> ListType[Error] - # Get any basic type errors - result = super(EmailAddress, self).errors(value) - if result: - return result - if not value or '@' not in value: - return [Error('Not a valid email address (missing @ sign)')] + if '@' not in value: + v.errors.append(Error('Not a valid email address (missing @ symbol)')) + return v + errors = [] user_part, domain_part = value.rsplit('@', 1) if not self.user_regex.match(user_part): - return [Error('Not a valid email address (invalid local user field)', pointer=user_part)] - if domain_part in self.domain_whitelist or self.is_domain_valid(domain_part): - return [] - else: + errors.append(Error( + 'Not a valid email address (invalid local user field)', + pointer=user_part, + )] + if ( + domain_part not in self.domain_whitelist and + not self.is_domain_valid(domain_part) + ): try: domain_part = domain_part.encode('idna').decode('ascii') - if self.is_domain_valid(domain_part): - return [] + domain_valid = self.is_domain_valid(domain_part) except UnicodeError: - pass - return [Error('Not 
a valid email address (invalid domain field)', pointer=domain_part)] + domain_valid = False + if not domain_valid: + errors.append(Error( + 'Not a valid email address (invalid domain field)', + pointer=domain_part, + )] + return Validation(errors=errors) @classmethod - def is_domain_valid(cls, domain_part): # type: (six.text_type) -> bool + def is_domain_valid(cls, domain_part: str) -> bool: if cls.domain_regex.match(domain_part): return True @@ -114,13 +111,14 @@ def is_domain_valid(cls, domain_part): # type: (six.text_type) -> bool return True return False - def introspect(self): # type: () -> Introspection + def introspect(self) -> Introspection: + domain_whitelist = None + if ( + self.domain_whitelist and + self.domain_whitelist is not self.__class__.domain_whitelist + ): + domain_whitelist = sorted(self.domain_whitelist) + return strip_none({ - 'type': self.introspect_type, - 'description': self.description, - 'domain_whitelist': ( - sorted(self.domain_whitelist) - if self.domain_whitelist and self.domain_whitelist is not self.__class__.domain_whitelist - else None - ), - }) + 'domain_whitelist': domain_whitelist, + }).update(super().introspect()) diff --git a/conformity/fields/net.py b/conformity/fields/net.py index f13f4b4..bb67f9f 100644 --- a/conformity/fields/net.py +++ b/conformity/fields/net.py @@ -1,111 +1,107 @@ -from __future__ import ( - absolute_import, - unicode_literals, -) - import re -from typing import ( - Any as AnyType, - List as ListType, -) +from typing import Any as AnyType -import attr -import six - -from conformity.fields.basic import ( - Introspection, - UnicodeString, -) +from conformity.fields.builtin import String from conformity.fields.meta import Any from conformity.fields.utils import strip_none -from conformity.types import Error +from conformity.types import ( + Error, + Validation, +) +from conformity.typing import Introspection ipv4_regex = re.compile(r'^(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}$') -@attr.s -class IPv4Address(UnicodeString): +class IPv4Address(String): """ - Conformity field that ensures that the value is a unicode string that is a valid IPv4 address. + Validates that the value is a string that is a valid IPv4 address. """ + valid_noun = 'an IPv4 address' introspect_type = 'ipv4_address' - def errors(self, value): # type: (AnyType) -> ListType[Error] + def validate(self, value: AnyType) -> Validation: # Get any basic type errors - result = super(IPv4Address, self).errors(value) - if result: - return result - # Check for IPv4-ness - if ipv4_regex.match(value): - return [] - else: - return [Error('Not a valid IPv4 address')] - - def introspect(self): # type: () -> Introspection - return strip_none({ - 'type': self.introspect_type, - 'description': self.description, - }) + v = super().validate(value) + if ( + v.is_valid() and + not ipv4_regex.match(value) + ): + v.errors.append(Error('Not a valid IPv4 address')) + return v -@attr.s -class IPv6Address(UnicodeString): +class IPv6Address(String): """ - Conformity field that ensures that the value is a unicode string that is a valid IPv6 address. + Validates that the value is a string that is a valid IPv6 address. 
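+
+    A brief usage sketch (validate() and the Validation result come from the
+    new base-field API used throughout this change):
+
+        field = IPv6Address()
+        assert field.validate('2001:db8::1').is_valid()
+        assert not field.validate('2001:db8:::1').is_valid()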
""" + valid_noun = 'an IPv6 address' introspect_type = 'ipv6_address' - def errors(self, value): # type: (AnyType) -> ListType[Error] + def validate(self, value: AnyType) -> Validation: # Get any basic type errors - result = super(IPv6Address, self).errors(value) - if result: - return result - # It must have at least one : + v = super().validate(value) + if v.errors: + return v + + # Validate formatting if ':' not in value: - return [Error('Not a valid IPv6 address (no colons)')] - # We can only have one '::' shortener. - if value.count('::') > 1: - return [Error('Not a valid IPv6 address (multiple shorteners)')] - # '::' should be encompassed by start, digits or end. - if ':::' in value: - return [Error('Not a valid IPv6 address (shortener not bounded)')] - # A single colon can neither start nor end an address. - if ((value.startswith(':') and not value.startswith('::')) or - (value.endswith(':') and not value.endswith('::'))): - return [Error('Not a valid IPv6 address (colon at start or end)')] - # We can never have more than 7 ':' (1::2:3:4:5:6:7:8 is invalid) - if value.count(':') > 7: - return [Error('Not a valid IPv6 address (too many colons)')] - # If we have no concatenation, we need to have 8 fields with 7 ':'. - if '::' not in value and value.count(':') != 7: + # It must have at least one : + v.errors.append(Error('Not a valid IPv6 address (no colons)')) + elif value.count('::') > 1: + # We can only have one '::' shortener. + v.errors.append(Error('Not a valid IPv6 address (multiple shorteners)')) + elif ':::' in value: + # '::' should be encompassed by start, digits or end. + v.errors.append(Error('Not a valid IPv6 address (shortener not bounded)')) + elif ( + (value.startswith(':') and not value.startswith('::')) or + (value.endswith(':') and not value.endswith('::')) + ): + # A single colon can neither start nor end an address. + v.errors.append(Error('Not a valid IPv6 address (colon at start or end)')) + elif value.count(':') > 7: + # We can never have more than 7 ':' (1::2:3:4:5:6:7:8 is invalid) + v.errors.append(Error('Not a valid IPv6 address (too many colons)')) + elif '::' not in value and value.count(':') != 7: + # If we have no concatenation, we need to have 8 fields with 7 ':'. # We might have an IPv4 mapped address. if value.count('.') != 3: - return [Error('Not a valid IPv6 address (v4 section not valid address)')] - value = self.expand_ipv6_address(value) - # Check that each of the hextets are between 0x0 and 0xFFFF. - for hextet in value.split(':'): - if hextet.count('.') == 3: - # If we have an IPv4 mapped address, the IPv4 portion has to - # be at the end of the IPv6 portion. - if not value.split(':')[-1] == hextet: - return [Error('Not a valid IPv6 address (v4 section not at end)')] - if not ipv4_regex.match(hextet): - return [Error('Not a valid IPv6 address (v4 section not valid address)')] - else: - try: - # a value error here means that we got a bad hextet, - # something like 0xzzzz - if int(hextet, 16) < 0x0 or int(hextet, 16) > 0xFFFF: - return [Error('Not a valid IPv6 address (invalid hextet)')] - except ValueError: - return [Error('Not a valid IPv6 address (invalid hextet)')] - return [] + v.errors.append(Error('Not a valid IPv6 address (v4 section not valid address)')) + + if not v.errors: + value = self.expand_ipv6_address(value) + # Check that each of the hextets are between 0x0 and 0xFFFF. 
+ for hextet in value.split(':'): + if v.errors: + # Fail fast if we have an error + break + if hextet.count('.') == 3: + # If we have an IPv4 mapped address, the IPv4 portion has to + # be at the end of the IPv6 portion. + if not value.split(':')[-1] == hextet: + v.errors.append(Error( + 'Not a valid IPv6 address (v4 section not at end)', + )) + elif not ipv4_regex.match(hextet): + v.errors.append(Error( + 'Not a valid IPv6 address (v4 section not valid address)', + )) + else: + try: + # a value error here means that we got a bad hextet, + # something like 0xzzzz + if int(hextet, 16) < 0x0 or int(hextet, 16) > 0xFFFF: + v.errors.append(Error('Not a valid IPv6 address (invalid hextet)')) + except ValueError: + v.errors.append(Error('Not a valid IPv6 address (invalid hextet)')) + return v @staticmethod - def expand_ipv6_address(value): # type: (six.text_type) -> six.text_type + def expand_ipv6_address(value: str) -> str: """ Expands a potentially-shortened IPv6 address into its full length """ @@ -120,7 +116,7 @@ def expand_ipv6_address(value): # type: (six.text_type) -> six.text_type if len(hextet) > 1: sep = len(hextet[0].split(':')) + len(hextet[1].split(':')) new_ip = hextet[0].split(':') - for _ in six.moves.range(fill_to - sep): + for _ in range(fill_to - sep): new_ip.append('0000') new_ip += hextet[1].split(':') else: @@ -132,18 +128,13 @@ def expand_ipv6_address(value): # type: (six.text_type) -> six.text_type ret_ip.append(('0' * (4 - len(hextet_str)) + hextet_str).lower()) return ':'.join(ret_ip) - def introspect(self): # type: () -> Introspection - return strip_none({ - 'type': self.introspect_type, - 'description': self.description, - }) - class IPAddress(Any): """ - Conformity field that ensures that the value is a unicode string that is a valid IPv4 or IPv6 address. + Validates that the value is a string that is a valid IPv4 or IPv6 address. 
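+
+    An illustrative sketch (IPv4Address and IPv6Address are combined through
+    the Any meta-field, as in __init__ below; this assumes Any exposes the
+    same validate() API as the other fields in this change):
+
+        field = IPAddress()
+        assert field.validate('127.0.0.1').is_valid()
+        assert field.validate('::1').is_valid()
+        assert not field.validate('not an ip').is_valid()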
""" + valid_noun = 'an IP address' introspect_type = 'ip_address' - def __init__(self, **kwargs): # type: (**AnyType) -> None - super(IPAddress, self).__init__(IPv4Address(), IPv6Address(), **kwargs) + def __init__(self, **kwargs: **AnyType) -> None: + super().__init__(IPv4Address(), IPv6Address(), **kwargs) diff --git a/conformity/fields/temporal.py b/conformity/fields/temporal.py index 363c18d..6d43ff5 100644 --- a/conformity/fields/temporal.py +++ b/conformity/fields/temporal.py @@ -1,174 +1,144 @@ -from __future__ import ( - absolute_import, - unicode_literals, -) - import datetime from typing import ( Any as AnyType, - FrozenSet, - List as ListType, - Optional, - Tuple as TupleType, - Type, - Union, + Generic, + TypeVar, ) -import warnings -import attr -import six - -from conformity.fields.basic import ( - Base, - Introspection, -) +from conformity.fields.base import BaseField from conformity.fields.utils import strip_none -from conformity.types import Error -from conformity.utils import ( - attr_is_optional, - attr_is_string, +from conformity.types import ( + Error, + Validation, ) +from conformity.typing import Introspection try: # noinspection PyUnresolvedReferences from freezegun import api as _freeze - valid_datetime_types = frozenset({datetime.datetime, _freeze.FakeDatetime}) - valid_date_types = frozenset({datetime.date, _freeze.FakeDate}) + DATETIME_TYPES = (datetime.datetime, _freeze.FakeDatetime) + DATE_TYPES = (datetime.date, _freeze.FakeDate) except ImportError: - valid_datetime_types = frozenset({datetime.datetime}) - valid_date_types = frozenset({datetime.date}) + DATETIME_TYPES = datetime.datetime + DATE_TYPES = datetime.date -@attr.s -class TemporalBase(Base): +T = TypeVar('T', datetime.date, datetime.time, datetime.datetime, datetime.timedelta) + + +class TemporalBase(Generic[T], BaseField): """ Common base class for all temporal types. Cannot be used on its own without extension. 
""" - # These four must be overridden - introspect_type = None # type: six.text_type - valid_isinstance = None # type: Optional[Union[Type, TupleType[Type, ...]]] - valid_noun = None # type: six.text_type - valid_types = None # type: FrozenSet[Type] - - gt = attr.ib(default=None) # type: Union[datetime.date, datetime.time, datetime.datetime, datetime.timedelta] - gte = attr.ib(default=None) # type: Union[datetime.date, datetime.time, datetime.datetime, datetime.timedelta] - lt = attr.ib(default=None) # type: Union[datetime.date, datetime.time, datetime.datetime, datetime.timedelta] - lte = attr.ib(default=None) # type: Union[datetime.date, datetime.time, datetime.datetime, datetime.timedelta] - description = attr.ib(default=None, validator=attr_is_optional(attr_is_string())) # type: Optional[six.text_type] - - def __attrs_post_init__(self): # type: () -> None - if self.gt is not None and self._invalid(self.gt): - raise TypeError("'gt' value {!r} cannot be used for comparisons in this type".format(self.gt)) - if self.gte is not None and self._invalid(self.gte): - raise TypeError("'gte' value {!r} cannot be used for comparisons in this type".format(self.gte)) - if self.lt is not None and self._invalid(self.lt): - raise TypeError("'lt' value {!r} cannot be used for comparisons in this type".format(self.lt)) - if self.lte is not None and self._invalid(self.lte): - raise TypeError("'lte' value {!r} cannot be used for comparisons in this type".format(self.lte)) + # These three must be overridden + valid_type = None + valid_noun = None + introspect_type = None + + def __init__( + self, + *, + gt: T=None, + gte: T=None, + lt: T=None, + lte: T=None, + **kwargs + ) -> None: + super().__init__(**kwargs) + self.gt = self.validate_parameter('gt', gt) + self.gte = self.validate_parameter('gte', gte) + self.lt = self.validate_parameter('lt', lt) + self.lte = self.validate_parameter('lte', lte) @classmethod - def _invalid(cls, value): - return type(value) not in cls.valid_types and ( - not cls.valid_isinstance or not isinstance(value, cls.valid_isinstance) - ) - - def errors(self, value): # type: (AnyType) -> ListType[Error] - if self._invalid(value): - # using stricter type checking, because date is subclass of datetime, but they're not comparable - return [Error('Not a {} instance'.format(self.valid_noun))] + def validate_parameter(cls, name: str, value: T) -> T: + if value is not None and not isinstance(value, self.valid_type): + raise TypeError(( + "'{}' value {!r} cannot be used for " + "comparisons in this type" + ).format(name, value)) + return value + + def validate(self, value: AnyType) -> Validation: + v = super().validate(value) + if v.errors: + return v errors = [] if self.gt is not None and value <= self.gt: errors.append(Error('Value not > {}'.format(self.gt))) - if self.lt is not None and value >= self.lt: - errors.append(Error('Value not < {}'.format(self.lt))) if self.gte is not None and value < self.gte: errors.append(Error('Value not >= {}'.format(self.gte))) + if self.lt is not None and value >= self.lt: + errors.append(Error('Value not < {}'.format(self.lt))) elif self.lte is not None and value > self.lte: errors.append(Error('Value not <= {}'.format(self.lte))) - return errors + return Validation(errors=errors) - def introspect(self): # type: () -> Introspection + def introspect(self) -> Introspection: return strip_none({ - 'type': self.introspect_type, - 'description': self.description, - 'gt': six.text_type(self.gt) if self.gt else None, - 'gte': six.text_type(self.gte) if self.gte 
else None, - 'lt': six.text_type(self.lt) if self.lt else None, - 'lte': six.text_type(self.lte) if self.lte else None, - }) + 'gt': str(self.gt) if self.gt else None, + 'gte': str(self.gte) if self.gte else None, + 'lt': str(self.lt) if self.lt else None, + 'lte': str(self.lte) if self.lte else None, + }).update(super().introspect()) -@attr.s -class DateTime(TemporalBase): +class DateTime(TemporalBase[DATETIME_TYPES]) """ - Conformity field that ensures that the value is a `datetime.datetime` instance and optionally enforces boundaries - for that `datetime` with the `gt`, `gte`, `lt`, and `lte` arguments, which must also be `datetime` instances if - specified. + Validates that the value is a `datetime.datetime` instance and optionally + enforces boundaries for that `datetime` with the `gt`, `gte`, `lt`, and + `lte` arguments, which must also be `datetime` instances if specified. """ - valid_types = valid_datetime_types - valid_noun = 'datetime.datetime' + valid_type = DATETIME_TYPES + valid_noun = 'a datetime.datetime' introspect_type = 'datetime' -@attr.s -class Date(TemporalBase): +class Date(TemporalBase[DATE_TYPES]): """ - Conformity field that ensures that the value is a `datetime.date` instance and optionally enforces boundaries - for that `date` with the `gt`, `gte`, `lt`, and `lte` arguments, which must also be `date` instances if specified. + Validates that the value is a `datetime.date` instance and optionally + enforces boundaries for that `date` with the `gt`, `gte`, `lt`, and `lte` + arguments, which must also be `date` instances if specified. """ - valid_types = valid_date_types - valid_noun = 'datetime.date' + valid_type = DATE_TYPES + valid_noun = 'a datetime.date' introspect_type = 'date' -@attr.s -class Time(TemporalBase): +class Time(TemporalBase[datetime.time]): """ - Conformity field that ensures that the value is a `datetime.time` instance and optionally enforces boundaries - for that `time` with the `gt`, `gte`, `lt`, and `lte` arguments, which must also be `time` instances if specified. + Validates that the value is a `datetime.time` instance and optionally + enforces boundaries for that `time` with the `gt`, `gte`, `lt`, and `lte` + arguments, which must also be `time` instances if specified. """ - valid_types = frozenset({datetime.time}) - valid_noun = 'datetime.time' + valid_type = datetime.time + valid_noun = 'a datetime.time' introspect_type = 'time' -@attr.s -class TimeDelta(TemporalBase): +class TimeDelta(TemporalBase[datetime.timedelta]): """ - Conformity field that ensures that the value is a `datetime.timedelta` instance and optionally enforces boundaries - for that `timedelta` with the `gt`, `gte`, `lt`, and `lte` arguments, which must also be `timedelta` instances if - specified. + Validates that the value is a `datetime.timedelta` instance and optionally + enforces boundaries for that `timedelta` with the `gt`, `gte`, `lt`, and + `lte` arguments, which must also be `timedelta` instances if specified. """ - valid_types = frozenset({datetime.timedelta}) - valid_noun = 'datetime.timedelta' + valid_type = datetime.timedelta + valid_noun = 'a datetime.timedelta' introspect_type = 'timedelta' -@attr.s -class TZInfo(TemporalBase): +class TZInfo(BaseField): """ - Conformity field that ensures that the value is a `datetime.tzinfo` instance. It has `gt`, `gte`, `lt`, and - `lte` arguments, but they cannot be used, are deprecated, and will be removed in Conformity 2.0.0. + Validates that the value is a `datetime.tzinfo` instance. 
""" - valid_types = frozenset({datetime.tzinfo}) - valid_isinstance = datetime.tzinfo - valid_noun = 'datetime.tzinfo' + valid_type = datetime.tzinfo + valid_noun = 'a datetime.tzinfo' introspect_type = 'tzinfo' - - def __attrs_post_init__(self): # type: () -> None - if self.gt is not None or self.gte is not None or self.lt is not None or self.lte is not None: - warnings.warn( - 'Arguments `gt`, `gte`, `lt`, and `lte` are deprecated in TZInfo and will be removed in ' - 'Conformity 2.0.', - DeprecationWarning, - ) - - super(TZInfo, self).__attrs_post_init__() diff --git a/conformity/fields/utils.py b/conformity/fields/utils.py index 877be7c..fd1a33c 100644 --- a/conformity/fields/utils.py +++ b/conformity/fields/utils.py @@ -1,8 +1,3 @@ -from __future__ import ( - absolute_import, - unicode_literals, -) - from typing import ( Dict, Hashable, @@ -22,8 +17,7 @@ IssueVar = TypeVar('IssueVar', Issue, Error, Warning) -def strip_none(value): - # type: (Dict[KT, VT]) -> Dict[KT, VT] +def strip_none(value: Dict[KT, VT]) -> Dict[KT, VT]: """ Takes a dict and removes all keys that have `None` values, used mainly for tidying up introspection responses. Take care not to use this on something @@ -32,8 +26,7 @@ def strip_none(value): return {k: v for k, v in value.items() if v is not None} -def update_pointer(issue, pointer_or_prefix): - # type: (IssueVar, Hashable) -> IssueVar +def update_pointer(issue: IssueVar, pointer_or_prefix: Hashable) -> IssueVar: """ Helper function to update a pointer attribute with a (potentially prefixed) dictionary key or list index. From 6a9326f776a10c5fad8e20d0ba4f555008e77f7b Mon Sep 17 00:00:00 2001 From: Seth Elliott Date: Mon, 3 Aug 2020 16:17:53 -0500 Subject: [PATCH 05/10] Add protocol fields --- conformity/fields/protocols.py | 158 +++++++++++++++++++++++++++++++++ 1 file changed, 158 insertions(+) create mode 100644 conformity/fields/protocols.py diff --git a/conformity/fields/protocols.py b/conformity/fields/protocols.py new file mode 100644 index 0000000..11d6167 --- /dev/null +++ b/conformity/fields/protocols.py @@ -0,0 +1,158 @@ +from collections import abc +import numbers +from typing import Any + +from conformity.fields.base import BaseField +from conformity.types import ( + Error, + Validation, +) + + +class Callable(BaseField): + """ + Validates that the value is callable + """ + + valid_type = abc.Callable + valid_noun = 'callable' + + +class Container(BaseField): + """ + Validates that the value implements the Container protocol (i.e., implements + the __conatins__ method) + """ + + valid_type = abc.Container + + +class Hashable(BaseField): + """ + Validates that the value is hashable (i.e., `hash(...)` can be called on the + value without error). + """ + + valid_type = abc.Hashable + valid_noun = 'hashable' + + +class Iterable(BaseField): + """ + Validates that the value is iterable + """ + + valid_type = abc.Iterable + valid_noun = 'iterable' + + +class Mapping(BaseField): + """ + Validates that the value implements the Mapping protocol (e.g. a dictionary) + """ + + valid_type = abc.Mapping + + +class Number(BaseField): + """ + Validates that the value is a Number and, optionally, enforces boundaries + for that number with the `gt`, `gte`, `lt`, and `lte` arguments. 
+    """
+
+    valid_type = numbers.Number
+
+    def __init__(
+        self,
+        *,
+        description: str,
+        allow_boolean: bool=False,
+        gt: int=None,
+        gte: int=None,
+        lt: int=None,
+        lte: int=None,
+    ):
+        super().__init__(description)
+        self.allow_boolean = allow_boolean
+        self.gt = gt
+        self.gte = gte
+        self.lt = lt
+        self.lte = lte
+
+    def validate(self, value: Any) -> Validation:
+        v = super().validate(value)
+        if not self.allow_boolean and isinstance(value, bool):
+            v.errors.append(Error('Value is not {}'.format(self.valid_noun)))
+
+        if v.is_valid():
+            if self.gt is not None and value <= self.gt:
+                v.errors.append(Error('Value not > {}'.format(self.gt)))
+            if self.lt is not None and value >= self.lt:
+                v.errors.append(Error('Value not < {}'.format(self.lt)))
+            if self.gte is not None and value < self.gte:
+                v.errors.append(Error('Value not >= {}'.format(self.gte)))
+            if self.lte is not None and value > self.lte:
+                v.errors.append(Error('Value not <= {}'.format(self.lte)))
+        return v
+
+    def introspect(self) -> Introspection:
+        return strip_none({
+            'gt': self.gt,
+            'gte': self.gte,
+            'lt': self.lt,
+            'lte': self.lte,
+        }).update(super().introspect())
+
+
+class Sized(BaseField):
+    """
+    Validates that the value implements the Sized protocol (i.e., implements
+    __len__). Optionally, enforces minimum and maximum lengths on sized values.
+    """
+
+    valid_type = abc.Sized
+    valid_noun = 'sized'
+
+    def __init__(
+        self,
+        *,
+        description: str=None,
+        min_length: int=None,
+        max_length: int=None,
+    ):
+        super().__init__(description=description)
+
+        # Validate the length constraints
+        if min_length is not None:
+            if min_length < 0:
+                raise ValueError('min_length must be >= 0')
+            if max_length is not None and min_length > max_length:
+                raise ValueError('min_length cannot be greater than max_length')
+
+        self.min_length = min_length
+        self.max_length = max_length
+
+    def validate(self, value: Any) -> Validation:
+        v = super().validate(value)
+        if v.is_valid():
+            value_len = len(value)
+            if self.min_length is not None and value_len < self.min_length:
+                v.errors.append(Error(
+                    'Value must have a length of at least {}'.format(
+                        self.min_length,
+                    ),
+                ))
+            elif self.max_length is not None and value_len > self.max_length:
+                v.errors.append(Error(
+                    'Value must have a length of no more than {}'.format(
+                        self.max_length,
+                    ),
+                ))
+        return v
+
+    def introspect(self) -> Introspection:
+        return strip_none({
+            'min_length': self.min_length,
+            'max_length': self.max_length,
+        }).update(super().introspect())
+

From 7ac8e630138acb213981abac8fc4bc68f02b6563 Mon Sep 17 00:00:00 2001
From: Seth Elliott
Date: Wed, 5 Aug 2020 11:30:17 -0500
Subject: [PATCH 06/10] Update structure types. Finalize simple fields.
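
The reworked Dictionary takes its schema as positional arguments: another
Dictionary instance, a plain dict, or an iterable of (key, value) field
pairs. A rough sketch of the intended usage (field names as introduced
elsewhere in this series; the validation internals are still being filled
in):

    from conformity.fields.simple import Integer, String
    from conformity.fields.structures import Dictionary

    schema = Dictionary(
        {'name': String(), 'age': Integer()},
        optional_keys=('age',),
    )
    # Intended behavior: 'age' may be omitted because it is optional.
    v = schema.validate({'name': 'Seth'})
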
--- conformity/fields/builtin.py | 196 --------- conformity/fields/country.py | 23 +- conformity/fields/geo.py | 36 +- conformity/fields/legacy.py | 20 + conformity/fields/meta.py | 64 +++ conformity/fields/protocols.py | 66 +++ conformity/fields/simple.py | 95 +++++ conformity/fields/structures.py | 704 +++++++++++--------------------- 8 files changed, 508 insertions(+), 696 deletions(-) delete mode 100644 conformity/fields/builtin.py create mode 100644 conformity/fields/simple.py diff --git a/conformity/fields/builtin.py b/conformity/fields/builtin.py deleted file mode 100644 index 0272c37..0000000 --- a/conformity/fields/builtin.py +++ /dev/null @@ -1,196 +0,0 @@ -import datetime -import decimal -from typing import ( - Any as AnyType, - Dict, - List as ListType, - Optional, - Tuple as TupleType, - Type, - Union, -) - -import attr -import six - -from conformity.constants import ERROR_CODE_UNKNOWN -from conformity.fields.utils import strip_none -from conformity.fields.base import BaseField -from conformity.types import ( - Error, - Warning, - Validation, -) -from conformity.typing import Introspection - - -# TODO: update -class Constant(BaseField): - """ - Conformity field that ensures that the value exactly matches the constant - value supplied or, if multiple constant values are supplied, exactly matches - one of those values. - """ - - introspect_type = 'constant' - - def __init__(self, *args, **kwargs): # type: (*AnyType, **AnyType) -> None - self.values = frozenset(args) - if not self.values: - raise ValueError('You must provide at least one constant value') - self.description = kwargs.pop(str('description'), None) # type: Optional[six.text_type] - if self.description and not isinstance(self.description, six.text_type): - raise TypeError("'description' must be a unicode string") - # Check they didn't pass any other kwargs - if kwargs: - raise TypeError('Invalid keyword arguments for Constant: {}'.format(kwargs.keys())) - - def _repr(cv): - return '"{}"'.format(cv) if isinstance(cv, six.string_types) else '{}'.format(cv) - - if len(self.values) == 1: - self._error_message = 'Value is not {}'.format(_repr(tuple(self.values)[0])) - else: - self._error_message = 'Value is not one of: {}'.format(', '.join(sorted(_repr(v) for v in self.values))) - - def errors(self, value): # type: (AnyType) -> ListType[Error] - try: - is_valid = value in self.values - except TypeError: - # Unhashable values can't be used for membership checks. - is_valid = False - - if not is_valid: - return [Error(self._error_message, code=ERROR_CODE_UNKNOWN)] - return [] - - def introspect(self) -> Introspection: - return strip_none({ - 'type': self.introspect_type, - 'values': [ - s if isinstance(s, (six.text_type, bool, int, float, type(None))) else six.text_type(s) - for s in sorted(self.values, key=six.text_type) - ], - 'description': self.description, - }) - - -class Anything(BaseField): - """ - Validates that the value can be anything. - """ - - introspect_type = 'anything' - - def validate(self, value: AnyType) -> Validation: - return Validation() - - -class Boolean(BaseField): - """ - Validates that the value is a boolean. 
- """ - - valid_type = bool - valid_noun = 'a boolean' - introspect_type = 'boolean' - - -class Integer(Number): - """ - Validates that the value is an integer - """ - - valid_type = int - valid_noun = 'an integer' - introspect_type = 'integer' - - -class Float(Number): - """ - Validates that the value is a float - """ - - valid_type = float - - -class Decimal(Number): - """ - Conformity field that ensures that the value is a `decimal.Decimal` and optionally enforces boundaries for that - decimal with the `gt`, `gte`, `lt`, and `lte` arguments. - """ - - valid_type = decimal.Decimal - - -class String(Sized): - """ - Validates that the value is a string. Optionally validates that the string - is not blank. - """ - valid_type = str - introspect_type = 'string' - - def __init__(self, *, allow_blank: bool=True, **kwargs): - super().__init__(**kwargs) - self.allow_blank = allow_blank - - def validate(self, value: AnyType) -> Validation: - v = super().validate(value) - if v.is_valid(): - # TODO: implement "should continue" instead of using is_valid() - # here and elsewhere. - if not (self.allow_blank or value.strip()): - v.errors.append(Error('Value cannot be blank')) - return v - - def introspect(self) -> Introspection: - return strip_none({ - 'allow_blank': self.allow_blank, - }).update(super().introspect()) - - -class Bytes(Sized): - """ - Validate that the value is a byte string - """ - - valid_type = bytes - valid_noun = 'a byte string' - valid_noun = 'byte string' - - -# Deprecated Conformity 1.x aliases -UnicodeString = String -ByteString = Bytes - - -@attr.s -class UnicodeDecimal(Base): - """ - Conformity field that ensures that the value is a unicode string that is also a valid decimal and can successfully - be converted to a `decimal.Decimal`. - """ - - introspect_type = 'unicode_decimal' - - description = attr.ib(default=None, validator=attr_is_optional(attr_is_string())) # type: Optional[six.text_type] - - def errors(self, value): # type: (AnyType) -> ListType[Error] - if not isinstance(value, six.text_type): - return [ - Error('Invalid decimal value (not unicode string)'), - ] - try: - decimal.Decimal(value) - except decimal.InvalidOperation: - return [ - Error('Invalid decimal value (parse error)'), - ] - return [] - - def introspect(self): # type: () -> Introspection - return strip_none({ - 'type': self.introspect_type, - 'description': self.description, - }) diff --git a/conformity/fields/country.py b/conformity/fields/country.py index 5e89036..e5e5007 100644 --- a/conformity/fields/country.py +++ b/conformity/fields/country.py @@ -1,8 +1,3 @@ -from __future__ import ( - absolute_import, - unicode_literals, -) - from typing import ( Any as AnyType, AnyStr, @@ -11,10 +6,12 @@ ) import pycountry -import six -from conformity.types import Error -from conformity.fields.basic import Constant +from conformity.types import ( + Error, + Validation, +) +from conformity.fields.builtin import Constant _countries_a2 = sorted(c.alpha_2 for c in pycountry.countries) @@ -27,9 +24,9 @@ class CountryCodeField(Constant): """ - Conformity field that ensures that the value is a valid ISO 3166 country codes. It permits only current countries - according to the installed version of PyCountry and uses the ISO 3166 alpha-2 codes. This field requires that - PyCountry be installed. + Validates that the value is a valid ISO 3166 country code. It permits only + current countries according to the installed version of PyCountry and uses + the ISO 3166 alpha-2 codes. 
This field requires PyCountry to be installed. """ introspect_type = 'country_code_field' @@ -49,7 +46,7 @@ def __init__( super(CountryCodeField, self).__init__(*valid_country_codes, **kwargs) self._error_message = 'Not a valid country code' - def errors(self, value): # type: (AnyType) -> ListType[Error] - if not isinstance(value, six.text_type): + def errors(self, value: AnyType) -> Validation: + if not isinstance(value, str): return [Error('Not a unicode string')] return super(CountryCodeField, self).errors(value) diff --git a/conformity/fields/geo.py b/conformity/fields/geo.py index 0a7190a..e9a5113 100644 --- a/conformity/fields/geo.py +++ b/conformity/fields/geo.py @@ -1,34 +1,24 @@ -from __future__ import ( - absolute_import, - unicode_literals, -) +from conformity.fields.builtin import Float -import attr -from conformity.fields.basic import Float - - -@attr.s class Latitude(Float): """ - Conformity field that ensures that the value is a float within the normal boundaries of a geographical latitude on - an ellipsoid or sphere. + Validates that the value is a float within the normal boundaries of a + geographical latitude on an ellipsoid or sphere. """ - - def __attrs_post_init__(self): # type: () -> None - # Set end limits if they're not set (and clip any set ones to valid range) - self.gte = max(-90, self.gte or -100) - self.lte = min(90, self.lte or 100) + def __init__(self, *, **kwargs) -> None: + kwargs['gte'] = max(kwargs.get('gte', -100), -90) + kwargs['lte'] = min(kwargs.get('lte', 100), 90) + super().__init__(**kwargs) -@attr.s class Longitude(Float): """ - Conformity field that ensures that the value is a float within the normal boundaries of a geographical longitude on - an ellipsoid or sphere. + Validates that the value is a float within the normal boundaries of a + geographical longitude on an ellipsoid or sphere. """ - def __attrs_post_init__(self): # type: () -> None - # Set end limits if they're not set (and clip any set ones to valid range) - self.gte = max(-180, self.gte or -190) - self.lte = min(180, self.lte or 190) + def __init__(self, *, **kwargs) -> None: + kwargs['gte'] = max(kwargs.get('gte', -190), -180) + kwargs['lte'] = min(kwargs.get('lte', 190), 180) + super().__init__(**kwargs) diff --git a/conformity/fields/legacy.py b/conformity/fields/legacy.py index bb52a0b..c568b0c 100644 --- a/conformity/fields/legacy.py +++ b/conformity/fields/legacy.py @@ -11,6 +11,11 @@ Validation, ) from conformity.fields.base import BaseField +from conformity.fields.simple import ( + Bytes, + Decimal, + String, +) class Base(BaseField): @@ -37,3 +42,18 @@ def validate(self, value: Any) -> Validation: errors=self.errors(value), warnings=self.warnings(value), ) + + +class UnicodeDecimal(String, Decimal): + """ + Validates that the value is a string that is also a valid decimal and can + successfully be converted to a `decimal.Decimal`. + """ + + valid_noun = 'a unicode decimal' + introspect_type = 'unicode_decimal' + + +# Deprecated Conformity 1.x aliases +UnicodeString = String +ByteString = Bytes diff --git a/conformity/fields/meta.py b/conformity/fields/meta.py index 070b2a9..7ac88e5 100644 --- a/conformity/fields/meta.py +++ b/conformity/fields/meta.py @@ -54,6 +54,70 @@ ) +class Anything(BaseField): + """ + Validates that the value can be anything. 
+ """ + + introspect_type = 'anything' + + def validate(self, value: AnyType) -> Validation: + return Validation() + + + + +# TODO: update +class Constant(BaseField): + """ + Conformity field that ensures that the value exactly matches the constant + value supplied or, if multiple constant values are supplied, exactly matches + one of those values. + """ + + introspect_type = 'constant' + + def __init__(self, *args, **kwargs): # type: (*AnyType, **AnyType) -> None + self.values = frozenset(args) + if not self.values: + raise ValueError('You must provide at least one constant value') + self.description = kwargs.pop(str('description'), None) # type: Optional[six.text_type] + if self.description and not isinstance(self.description, six.text_type): + raise TypeError("'description' must be a unicode string") + # Check they didn't pass any other kwargs + if kwargs: + raise TypeError('Invalid keyword arguments for Constant: {}'.format(kwargs.keys())) + + def _repr(cv): + return '"{}"'.format(cv) if isinstance(cv, six.string_types) else '{}'.format(cv) + + if len(self.values) == 1: + self._error_message = 'Value is not {}'.format(_repr(tuple(self.values)[0])) + else: + self._error_message = 'Value is not one of: {}'.format(', '.join(sorted(_repr(v) for v in self.values))) + + def errors(self, value): # type: (AnyType) -> ListType[Error] + try: + is_valid = value in self.values + except TypeError: + # Unhashable values can't be used for membership checks. + is_valid = False + + if not is_valid: + return [Error(self._error_message, code=ERROR_CODE_UNKNOWN)] + return [] + + def introspect(self) -> Introspection: + return strip_none({ + 'type': self.introspect_type, + 'values': [ + s if isinstance(s, (six.text_type, bool, int, float, type(None))) else six.text_type(s) + for s in sorted(self.values, key=six.text_type) + ], + 'description': self.description, + }) + + @attr.s class Nullable(Base): """ diff --git a/conformity/fields/protocols.py b/conformity/fields/protocols.py index 11d6167..ec3dcb5 100644 --- a/conformity/fields/protocols.py +++ b/conformity/fields/protocols.py @@ -156,3 +156,69 @@ def introspect(self) -> Introspection: 'max_length': self.max_length, }).update(super().introspect()) + +class Collection(Sized): + """ + Validates that the value is a collection of items that all pass validation with + the Conformity field passed to the `contents` argument and optionally + establishes boundaries for that list with the `max_length` and + `min_length` arguments. + """ + + valid_type = abc.Collection + + def __init__(self, contents: BaseField, *, **kwargs) -> None: + super().__init__(**kwargs) + self.contents = contents + + def validate(self, value: AnyType) -> Validation: + v = super().validate(value) + + if not v.errors: + for lazy_pointer, element in self._enumerate(value): + v.errors.extend( + update_pointer(error, lazy_pointer.get()) + for error in (self.contents.errors(element) or []) + ) + v.warnings.extend( + update_pointer(warning, lazy_pointer.get()) + for warning in self.contents.warnings(element) + ) + + return v + + @classmethod + def _enumerate(cls, values): + # We use a lazy pointer here so that we don't evaluate the pointer for every item that doesn't generate an + # error. We only evaluate the pointer for each item that does generate an error. This is critical in sets, + # where the pointer is the value converted to a string instead of an index. 
+ return ((cls.LazyPointer(i, value), value) for i, value in enumerate(values)) + + def introspect(self) -> Introspection: + return strip_none({ + 'contents': self.contents.introspect(), + }).update(super().introspect()) + + class LazyPointer(object): + def __init__(self, index, _): + self.get = lambda: index + + +class Sequence(Collection): + valid_type = abc.Sequence + + +class Set(Collection): + """ + Validates that the value is an abstract set of items that all pass + validation with the Conformity field passed to the `contents` argument and + optionally establishes boundaries for that list with the `max_length` and + `min_length` arguments. + """ + + valid_type = abc.Set + introspect_type = 'set' + + class LazyPointer(object): + def __init__(self, _, value): + self.get = lambda: '[{}]'.format(str(value)) diff --git a/conformity/fields/simple.py b/conformity/fields/simple.py new file mode 100644 index 0000000..40f180c --- /dev/null +++ b/conformity/fields/simple.py @@ -0,0 +1,95 @@ +import decimal +from typing import Any + +from conformity.fields.utils import strip_none +from conformity.fields.base import BaseField +from conformity.types import ( + Error, + Validation, +) +from conformity.typing import Introspection + +__all__ = ( + 'Boolean', + 'Bytes', + 'Decimal', + 'Float', + 'Integer', + 'String', +) + + +# +# Numeric types +# +class Boolean(BaseField): + """ + Validates that the value is a boolean + """ + + valid_type = bool + valid_noun = 'a boolean' + introspect_type = 'boolean' + + +class Integer(Number): + """ + Validates that the value is an integer + """ + + valid_type = int + valid_noun = 'an integer' + introspect_type = 'integer' + + +class Float(Number): + """ + Validates that the value is a float + """ + + valid_type = float + + +class Decimal(Number): + """ + Validates that the value is a `decimal.Decimal` + """ + + valid_type = decimal.Decimal + +# +# String types +# +class String(Sized): + """ + Validates that the value is a string. Optionally validates that the string + is not blank. 
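+
+    A small sketch of the allow_blank flag (assuming the type and length
+    checks performed by the base classes in conformity.fields.protocols):
+
+        assert String().validate('').is_valid()
+        assert not String(allow_blank=False).validate('   ').is_valid()
+        assert not String().validate(123).is_valid()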
+ """ + valid_type = str + introspect_type = 'string' + + def __init__(self, *, allow_blank: bool=True, **kwargs): + super().__init__(**kwargs) + self.allow_blank = allow_blank + + def validate(self, value: Any) -> Validation: + v = super().validate(value) + if not v.errors: + if not (self.allow_blank or value.strip()): + v.errors.append(Error('Value cannot be blank')) + return v + + def introspect(self) -> Introspection: + return strip_none({ + 'allow_blank': self.allow_blank, + }).update(super().introspect()) + + +class Bytes(Sized): + """ + Validate that the value is a byte string + """ + + valid_type = bytes + valid_noun = 'a byte string' + valid_noun = 'byte string' diff --git a/conformity/fields/structures.py b/conformity/fields/structures.py index e3b73ad..ba1e921 100644 --- a/conformity/fields/structures.py +++ b/conformity/fields/structures.py @@ -1,13 +1,5 @@ -from __future__ import ( - absolute_import, - unicode_literals, -) - import abc -from collections import OrderedDict -import sys from typing import ( - AbstractSet, Any as AnyType, Callable, Container, @@ -27,18 +19,15 @@ cast, ) -import attr -import six - from conformity.constants import ( ERROR_CODE_MISSING, ERROR_CODE_UNKNOWN, ) -from conformity.fields.basic import ( - Anything, - Base, +from conformity.fields.basic import Anything +from conformity.fields.protocols import ( + Collection, Hashable, - Introspection, + Sized, ) from conformity.fields.utils import ( strip_none, @@ -46,286 +35,182 @@ ) from conformity.types import ( Error, + Validation, Warning, ) -from conformity.utils import ( - attr_is_instance, - attr_is_int, - attr_is_iterable, - attr_is_optional, - attr_is_string, -) - - -VT = TypeVar('VT', bound=Container) +from conformity.typing import Introspection -if sys.version_info < (3, 7): - # We can't just decorate this with @six.add_metaclass. In Python < 3.7, that results in this error: - # TypeError: Cannot inherit from plain Generic - # But we can't leave that off, because in Python 3.7+, the abstract method is not enforced without this (it is - # enforced in < 3.7 since GenericMeta extends ABCMeta). - # So we do it this way: - _ACVT = TypeVar('_ACVT') - - def _acv_decorator(_metaclass): # type: (Type) -> Callable[[Type[_ACVT]], Type[_ACVT]] - def wrapper(cls): # type: (Type[_ACVT]) -> Type[_ACVT] - return cls - return wrapper -else: - _acv_decorator = six.add_metaclass - - -@_acv_decorator(abc.ABCMeta) -class AdditionalCollectionValidator(Generic[VT]): +class List(Collection): """ - Conformity fields validating collections can have an additional custom validator that can perform extra checks - across the entire collection, such as ensuring that values that need to refer to other values in the same - collection properly match. This is especially helpful to be able to avoid duplicating the existing collection - validation in Conformity's structure fields. + Validates that the value is a list """ - @abc.abstractmethod - def errors(self, value): # type: (VT) -> ListType[Error] - """ - Called after the collection has otherwise passed validation, and not called if the collection has not passed - its normal validation. - - :param value: The value to be validated. - - :return: A list of errors encountered with this value. 
- """ + valid_type = list -@attr.s -class _BaseSequenceOrSet(Base): +class Dictionary(BaseField): """ - Conformity field that ensures that the value is a list of items that all pass validation with the Conformity field - passed to the `contents` argument and optionally establishes boundaries for that list with the `max_length` and - `min_length` arguments. + Validates that the value is a dictionary with a specific set of keys and + value that validate with the Conformity fields associated with those keys + (`contents`). Keys are required unless they are listed in the + `optional_keys` argument. No extra keys are allowed unless the + `allow_extra_keys` argument is set to `True`. + + If the `contents` argument is an instance of `OrderedDict`, the field + introspection will include a `display_order` list of keys matching the order + they exist in the `OrderedDict`, and errors will be reported in the order + the keys exist in the `OrderedDict`. Order will be maintained for any calls + to `extend` as long as those calls also use `OrderedDict`. Ordering behavior + is undefined otherwise. This field does NOT enforce that the value it + validates presents keys in the same order. `OrderedDict` is used strictly + for documentation and error-object-ordering purposes only. """ - contents = attr.ib() - max_length = attr.ib(default=None, validator=attr_is_optional(attr_is_int())) # type: Optional[int] - min_length = attr.ib(default=None, validator=attr_is_optional(attr_is_int())) # type: Optional[int] - description = attr.ib(default=None, validator=attr_is_optional(attr_is_string())) # type: Optional[six.text_type] - additional_validator = attr.ib( - default=None, - validator=attr_is_optional(attr_is_instance(AdditionalCollectionValidator)), - ) # type: Optional[AdditionalCollectionValidator[AnyType]] - - valid_types = None # type: Union[Type[Sized], TupleType[Type[Sized], ...]] - type_noun = None # deprecated, will be removed in Conformity 2.0 - introspect_type = None # type: six.text_type - type_error = None # type: six.text_type - - def __attrs_post_init__(self): # type: () -> None - if self.min_length is not None and self.max_length is not None and self.min_length > self.max_length: - raise ValueError('min_length cannot be greater than max_length in UnicodeString') - - def errors(self, value): # type: (AnyType) -> ListType[Error] - if not isinstance(value, self.valid_types): - return [Error(self.type_error)] - - result = [] - if self.max_length is not None and len(value) > self.max_length: - result.append( - Error('List is longer than {}'.format(self.max_length)), - ) - elif self.min_length is not None and len(value) < self.min_length: - result.append( - Error('List is shorter than {}'.format(self.min_length)), - ) - for lazy_pointer, element in self._enumerate(value): - result.extend( - update_pointer(error, lazy_pointer.get()) - for error in (self.contents.errors(element) or []) - ) - - if not result and self.additional_validator: - return self.additional_validator.errors(value) - - return result - - def warnings(self, value): - warnings = super(_BaseSequenceOrSet, self).warnings(value) - for lazy_pointer, element in self._enumerate(value): - warnings.extend( - update_pointer(warning, lazy_pointer.get()) - for warning in self.contents.warnings(element) - ) - return warnings - - @classmethod - def _enumerate(cls, values): - # We use a lazy pointer here so that we don't evaluate the pointer for every item that doesn't generate an - # error. 
We only evaluate the pointer for each item that does generate an error. This is critical in sets, - # where the pointer is the value converted to a string instead of an index. - return ((cls.LazyPointer(i, value), value) for i, value in enumerate(values)) - - def introspect(self): # type: () -> Introspection - introspection = { - 'type': self.introspect_type, - 'contents': self.contents.introspect(), - 'max_length': self.max_length, - 'min_length': self.min_length, - 'description': self.description, - 'additional_validation': ( - self.additional_validator.__class__.__name__ if self.additional_validator else None - ), - } - - return strip_none(introspection) - - class LazyPointer(object): - def __init__(self, index, _): - self.get = lambda: index - - -@attr.s -class List(_BaseSequenceOrSet): - additional_validator = attr.ib( - default=None, - validator=attr_is_optional(attr_is_instance(AdditionalCollectionValidator)), - ) # type: Optional[AdditionalCollectionValidator[list]] - - valid_types = list - introspect_type = 'list' - type_error = 'Not a list' - - -@attr.s -class Sequence(_BaseSequenceOrSet): - additional_validator = attr.ib( - default=None, - validator=attr_is_optional(attr_is_instance(AdditionalCollectionValidator)), - ) # type: Optional[AdditionalCollectionValidator[SequenceType]] - - valid_types = SequenceType - introspect_type = 'sequence' - type_error = 'Not a sequence' - - -@attr.s -class Set(_BaseSequenceOrSet): - """ - Conformity field that ensures that the value is an abstract set of items that all pass validation with the - Conformity field passed to the `contents` argument and optionally establishes boundaries for that list with the - `max_length` and `min_length` arguments. - """ - additional_validator = attr.ib( - default=None, - validator=attr_is_optional(attr_is_instance(AdditionalCollectionValidator)), - ) # type: Optional[AdditionalCollectionValidator[AbstractSet]] - - valid_types = AbstractSet - introspect_type = 'set' - type_error = 'Not a set or frozenset' - - class LazyPointer(object): - def __init__(self, _, value): - self.get = lambda: '[{}]'.format(str(value)) - + valid_type = dict + valid_noun = 'a dictionary' + introspect_type = 'dictionary' -@attr.s -class Dictionary(Base): - """ - Conformity field that ensures that the value is a dictionary with a specific set of keys and value that validate - with the Conformity fields associated with those keys (`contents`). Keys are required unless they are listed in - the `optional_keys` argument. No extra keys are allowed unless the `allow_extra_keys` argument is set to `True`. - - If the `contents` argument is an instance of `OrderedDict`, the field introspection will include a `display_order` - list of keys matching the order they exist in the `OrderedDict`, and errors will be reported in the order the keys - exist in the `OrderedDict`. Order will be maintained for any calls to `extend` as long as those calls also use - `OrderedDict`. Ordering behavior is undefined otherwise. This field does NOT enforce that the value it validates - presents keys in the same order. `OrderedDict` is used strictly for documentation and error-object-ordering - purposes only. - """ + # Deprecated class var method + contents = None # type: Optional[AnyContents] + optional_keys = None # type: Optional[bool] + allow_extra_keys = False # type: bool + # TODO: add __class__.description and __init__ processing? 
- introspect_type = 'dictionary' + def __init__( + self, + *contents, + *, + optional_keys: Iterable[HashableType]=None, + allow_extra_keys: bool=False, + **kwargs + ) -> None: + super.__init__(**kwargs) - # Makes MyPy allow optional_keys to have this type - _optional_keys_default = frozenset() # type: Union[TupleType[HashableType, ...], FrozenSet[HashableType]] - - contents = attr.ib( - default=None, - validator=attr_is_optional(attr_is_instance(dict)), - ) # type: Mapping[HashableType, Base] - optional_keys = attr.ib( - default=_optional_keys_default, - validator=attr_is_iterable(attr_is_instance(object)), - ) # type: Union[TupleType[HashableType, ...], FrozenSet[HashableType]] - allow_extra_keys = attr.ib(default=None) # type: bool - description = attr.ib(default=None, validator=attr_is_optional(attr_is_string())) # type: Optional[six.text_type] - additional_validator = attr.ib( - default=None, - validator=attr_is_optional(attr_is_instance(AdditionalCollectionValidator)), - ) # type: Optional[AdditionalCollectionValidator[Mapping[HashableType, AnyType]]] - - def __attrs_post_init__(self): # type: () -> None - if self.contents is None and getattr(self.__class__, 'contents', None) is not None: - # If no contents were provided but a subclass has hard-coded contents, use those - self.contents = self.__class__.contents - if self.contents is None: + if ( + contents is None and + getattr(self.__class__, 'contents', None) is not None + ): + # If no contents were provided but a subclass has hard-coded + # contents, use those + contents = self.__class__.contents + if contents is None: # If there are still no contents, raise an error raise ValueError("'contents' is a required argument") - if not isinstance(self.contents, dict): - raise TypeError("'contents' must be a dict") - if ( - self.optional_keys is self._optional_keys_default and - getattr(self.__class__, 'optional_keys', None) is not None - ): - # If the optional_keys argument was defaulted (not specified) but a subclass has it hard-coded, use that - self.optional_keys = self.__class__.optional_keys - if not isinstance(self.optional_keys, frozenset): - self.optional_keys = frozenset(self.optional_keys) - - if self.allow_extra_keys is None and getattr(self.__class__, 'allow_extra_keys', None) is not None: - # If the allow_extra_keys argument was not specified but a subclass has it hard-coded, use that value - self.allow_extra_keys = self.__class__.allow_extra_keys - if self.allow_extra_keys is None: - # If no value is found, default to False - self.allow_extra_keys = False - if not isinstance(self.allow_extra_keys, bool): - raise TypeError("'allow_extra_keys' must be a boolean") + # Build complete key/value field list + item_fields = [] + for fields in contents: + if isinstance(fields, Dictionary): + fields = fields.contents.items() + elif isinstance(fields, dict): + fields = fields.items() + elif not isinstance(fields, abc.Iterable): + raise TypeError( + 'Positional arguments must be either a Dictionary instance, ' + 'a dict instance, or an iterable of (key, value) tuples' + ) + item_fields.extend(fields) + + # Validate optional keys + # TODO: handle __class__.optional_keys + if optional_keys is None: + optional_keys = () + elif not isinstance(optional_keys, abc.Iterable): + raise ValueError("'optional_keys' must be an iterable") + optional_keys = frozenset(optional_keys) + + # Validate each key/value field pair + self._constant_fields = {} + self._variable_fields = [] + for key_field, value_field in item_fields: + # Convert hashable 
builtin type instances to Literals (i.e., constants)
+        if isinstance(key_field, LITERAL_TYPES):
+            key_field = Literal(key_field)
+        if isinstance(value_field, LITERAL_TYPES):
+            value_field = Literal(value_field)
+
+        # Validate key/value field types
+        if not isinstance(key_field, Hashable):
+            raise ValueError(
+                'Dictionary key field must be a Conformity Hashable field'
+            )
+        if not isinstance(value_field, BaseField):
+            raise ValueError(
+                'Dictionary value field must be a Conformity field'
+            )
+
+        if isinstance(key_field, Literal):
+            if key_field.value in optional_keys:
+                self._variable_fields.append((Optional(key_field), value_field))
+            else:
+                self._constant_fields[key_field.value] = value_field
+        else:
+            self._variable_fields.append((key_field, value_field))
 
-        if self.description is None and getattr(self.__class__, 'description', None):
-            # If the description was not specified but a subclass has it hard-coded, use that value
-            self.description = self.__class__.description
-        if self.description is not None and not isinstance(self.description, six.text_type):
-            raise TypeError("'description' must be a unicode string")
+        # Validate allow_extra_keys
+        # TODO: add __class__.allow_extra_keys handling
+        if not isinstance(allow_extra_keys, bool):
+            raise TypeError("'allow_extra_keys' must be a boolean")
+        if allow_extra_keys:
+            # Add a variable field that accepts anything
+            self._variable_fields.append((Hashable(), Anything()))
 
-    def errors(self, value):  # type: (AnyType) -> ListType[Error]
-        if not isinstance(value, dict):
-            return [Error('Not a dict')]
+    def validate(self, value: AnyType) -> Validation:
+        v = super().validate(value)
+        if v.errors:
+            return v
 
-        result = []
-        for key, field in self.contents.items():
-            # Check key is present
-            if key not in value:
-                if key not in self.optional_keys:
-                    result.append(
-                        Error('Missing key: {}'.format(key), code=ERROR_CODE_MISSING, pointer=six.text_type(key)),
-                    )
+        # Validate items
+        for d_key, d_value in value.items():
+            if d_key in self._constant_fields:
+                # Validate constant key field
+                value_field = self._constant_fields[d_key]
+                value_v = value_field.validate(d_value)
             else:
-                # Check key type
-                result.extend(
-                    update_pointer(error, key)
-                    for error in (field.errors(value[key]) or [])
-                )
-        # Check for extra keys
-        extra_keys = set(value.keys()) - set(self.contents.keys())
-        if extra_keys and not self.allow_extra_keys:
-            result.append(
-                Error(
-                    'Extra keys present: {}'.format(', '.join(six.text_type(key) for key in sorted(extra_keys))),
-                    code=ERROR_CODE_UNKNOWN,
-                ),
-            )
-
-        if not result and self.additional_validator:
-            return self.additional_validator.errors(value)
+                # Validate variable key field
+                # TODO: extend warnings
+                key_valid = False
+                key_errors = []
+                value_valid = False
+                value_errors = []
+                for key_field, value_field in self._variable_fields:
+                    key_v = key_field.validate(d_key)
+                    if key_v.errors:
+                        if not key_valid:
+                            key_errors.extend(key_v.errors)
+                    else:
+                        key_valid = True
+                    value_v = value_field.validate(d_value)
+                    if value_v.errors:
+                        if not value_valid:
+                            value_errors.extend(value_v.errors)
+                    else:
+                        value_valid = True
+
+        # result = []
+        # for key, field in self.contents.items():
+        #     # Check key is present
+        #     if key not in value:
+        #         if key not in self.optional_keys:
+        #             result.append(
+        #                 Error('Missing key: {}'.format(key), code=ERROR_CODE_MISSING, pointer=str(key)),
+        #             )
+        #         else:
+        #             # Check key type
+        #             result.extend(
+        #                 update_pointer(error, key)
+        #                 for error in (field.errors(value[key]) or [])
+        #             )
+        #     # Check for extra keys
+        #     extra_keys = set(value.keys()) - set(self.contents.keys())
+        #     if extra_keys and not self.allow_extra_keys:
+        #         result.append(
+        #             Error(
+        #                 'Extra keys present: {}'.format(', '.join(str(key) for key in sorted(extra_keys))),
+        #                 code=ERROR_CODE_UNKNOWN,
+        #             ),
+        #         )
 
-        return result
+        return v
 
@@ -346,227 +231,118 @@ def warnings(self, value):
 
     def extend(
         self,
-        contents=None,  # type: Optional[Mapping[HashableType, Base]]
-        optional_keys=None,  # type: Optional[Union[TupleType[HashableType, ...], FrozenSet[HashableType]]]
-        allow_extra_keys=None,  # type: Optional[bool]
-        description=None,  # type: Optional[six.text_type]
-        replace_optional_keys=False,  # type: bool
-        additional_validator=None,  # type: Optional[AdditionalCollectionValidator[Mapping[HashableType, AnyType]]]
-    ):
-        # type: (...) -> Dictionary
+        *contents,
+        optional_keys: Iterable[HashableType]=None,
+        allow_extra_keys: bool=None,
+        description: str=None,
+    ) -> Dictionary:
         """
-        This method allows you to create a new `Dictionary` that extends the current `Dictionary` with additional
-        contents and/or optional keys, and/or replaces the `allow_extra_keys` and/or `description` attributes.
-
-        :param contents: More contents, if any, to extend the current contents
-        :param optional_keys: More optional keys, if any, to extend the current optional keys
-        :param allow_extra_keys: If non-`None`, this overrides the current `allow_extra_keys` attribute
-        :param description: If non-`None`, this overrides the current `description` attribute
-        :param replace_optional_keys: If `True`, then the `optional_keys` argument will completely replace, instead of
-                                      extend, the current optional keys
-        :param additional_validator: If non-`None`, this overrides the current `additional_validator` attribute
-
-        :return: A new `Dictionary` extended from the current `Dictionary` based on the supplied arguments
+        Creates a new Dictionary instance that "extends" from this one.
+
+        NOTE: This method has been deprecated and will be removed in a future
+              release. Use Dictionary(<base dictionary>, <additional contents>)
+              syntax instead.
""" - optional_keys = frozenset(optional_keys or ()) return Dictionary( - contents=cast(Type[Union[Dict, OrderedDict]], type(self.contents))( - (k, v) for d in (self.contents, contents) for k, v in six.iteritems(d) - ) if contents else self.contents, - optional_keys=optional_keys if replace_optional_keys else frozenset(self.optional_keys) | optional_keys, - allow_extra_keys=self.allow_extra_keys if allow_extra_keys is None else allow_extra_keys, - description=self.description if description is None else description, - additional_validator=self.additional_validator if additional_validator is None else additional_validator, + self, + *contents, + optional_keys=optional_keys, + allow_extra_keys=( + self.allow_extra_keys + if allow_extra_keys is None + else allow_extra_keys + ), + description=description or self.description ) - def introspect(self): # type: () -> Introspection - display_order = None # type: Optional[ListType[AnyType]] - if isinstance(self.contents, OrderedDict): - display_order = list(self.contents.keys()) - + def introspect(self) -> Introspection: return strip_none({ - 'type': self.introspect_type, - 'contents': { - key: value.introspect() - for key, value in self.contents.items() - }, - 'optional_keys': sorted(self.optional_keys), - 'allow_extra_keys': self.allow_extra_keys, - 'description': self.description, - 'display_order': display_order, - 'additional_validation': ( - self.additional_validator.__class__.__name__ if self.additional_validator else None - ), - }) + 'contents': [ + { + 'key': key_field.introspect(), + 'value': value_field.introspect(), + } + for key_field, value_field in self.contents.items() + ], + }).update(super().introspect()) -@attr.s -class SchemalessDictionary(Base): +class SchemalessDictionary(Dictionary, Sized): """ - Conformity field that ensures that the value is a dictionary of any keys and values, but optionally enforcing that - the keys pass the Conformity validation specified with the `key_type` argument and/or that the values pass the - Conformity validation specified with the `value_type` argument. Size of the dictionary can also be constrained with - the optional `max_length` and `min_length` arguments. + Validates that the value is a dictionary of any keys and values, but + optionally enforcing that the keys pass the Conformity validation specified + with the `key_type` argument and/or that the values pass the Conformity + validation specified with the `value_type` argument. Size of the dictionary + can also be constrained with the optional `max_length` and `min_length` + arguments. 
""" introspect_type = 'schemaless_dictionary' - # Makes MyPy allow key_type and value_type have type Base - _default_key_type = attr.Factory(Hashable) # type: Base - _default_value_type = attr.Factory(Anything) # type: Base - - key_type = attr.ib(default=_default_key_type, validator=attr_is_instance(Base)) # type: Base - value_type = attr.ib(default=_default_value_type, validator=attr_is_instance(Base)) # type: Base - max_length = attr.ib(default=None, validator=attr_is_optional(attr_is_int())) # type: Optional[int] - min_length = attr.ib(default=None, validator=attr_is_optional(attr_is_int())) # type: Optional[int] - description = attr.ib(default=None, validator=attr_is_optional(attr_is_string())) # type: Optional[six.text_type] - additional_validator = attr.ib( - default=None, - validator=attr_is_optional(attr_is_instance(AdditionalCollectionValidator)), - ) # type: Optional[AdditionalCollectionValidator[Mapping[HashableType, AnyType]]] - - def __attrs_post_init__(self): # type: () -> None - if self.min_length is not None and self.max_length is not None and self.min_length > self.max_length: - raise ValueError('min_length cannot be greater than max_length in UnicodeString') - - def errors(self, value): # type: (AnyType) -> ListType[Error] - if not isinstance(value, dict): - return [Error('Not a dict')] - - result = [] - - if self.max_length is not None and len(value) > self.max_length: - result.append(Error('Dict contains more than {} value(s)'.format(self.max_length))) - elif self.min_length is not None and len(value) < self.min_length: - result.append(Error('Dict contains fewer than {} value(s)'.format(self.min_length))) - - for key, field in value.items(): - result.extend( - update_pointer(error, key) - for error in (self.key_type.errors(key) or []) - ) - result.extend( - update_pointer(error, key) - for error in (self.value_type.errors(field) or []) - ) - - if not result and self.additional_validator: - return self.additional_validator.errors(value) - - return result - - def warnings(self, value): - # type: (AnyType) -> ListType[Warning] - if not isinstance(value, dict): - return [] - - result = [] # type: ListType[Warning] - for d_key, d_value in value.items(): - result.extend( - update_pointer(warning, d_key) - for warning in self.key_type.warnings(d_key) - ) - result.extend( - update_pointer(warning, d_key) - for warning in self.value_type.warnings(d_value) - ) - - return result - - def introspect(self): # type: () -> Introspection - result = { - 'type': self.introspect_type, - 'max_length': self.max_length, - 'min_length': self.min_length, - 'description': self.description, - 'additional_validation': ( - self.additional_validator.__class__.__name__ if self.additional_validator else None - ), - } # type: Introspection - # We avoid using isinstance() here as that would also match subclass instances - if not self.key_type.__class__ == Hashable: - result['key_type'] = self.key_type.introspect() - if not self.value_type.__class__ == Anything: - result['value_type'] = self.value_type.introspect() - - return strip_none(result) + def __init__( + self, + *, + key_type: BaseField=None, + value_type: BaseField=None, + **kwargs + ) -> None: + super().__init__((key_type, value_type), **kwargs) -class Tuple(Base): +class Tuple(BaseField): """ - Conformity field that ensures that the value is a tuple with the same number of arguments as the number of - positional arguments passed to this field, and that each argument passes validation with the corresponding - Conformity field provided to the 
positional arguments.
+    Validates that the value is a tuple with the same number of arguments as the
+    number of positional arguments passed to this field, and that each argument
+    passes validation with the corresponding Conformity field provided to the
+    positional arguments.
     """
 
-    introspect_type = 'tuple'
+    valid_type = tuple
+
+    def __init__(self, *contents: Iterable[BaseField], **kwargs) -> None:
+        super().__init__(**kwargs)
 
-    def __init__(self, *contents, **kwargs):  # type: (*Base, **AnyType) -> None
-        # We can't use attrs here because we need to capture all positional arguments and support keyword arguments
         self.contents = contents
+
+        # Validate contents fields
         for i, c in enumerate(self.contents):
-            if not isinstance(c, Base):
-                raise TypeError('Argument {} must be a Conformity field instance, is actually: {!r}'.format(i, c))
-
-        # We can't put a keyword argument after *args in Python 2, so we need this
-        self.description = kwargs.pop(str('description'), None)  # type: Optional[six.text_type]
-        if self.description and not isinstance(self.description, six.text_type):
-            raise TypeError("'description' must be a unicode string")
-
-        self.additional_validator = kwargs.pop(
-            'additional_validator',
-            None,
-        )  # type: Optional[AdditionalCollectionValidator[TupleType[AnyType, ...]]]
-        if self.additional_validator and not isinstance(self.additional_validator, AdditionalCollectionValidator):
-            raise TypeError("'additional_validator' must be an AdditionalCollectionValidator")
-
-        if kwargs:
-            raise TypeError('Unknown keyword arguments: {}'.format(', '.join(kwargs.keys())))
-
-    def errors(self, value):  # type: (AnyType) -> ListType[Error]
-        if not isinstance(value, tuple):
-            return [Error('Not a tuple')]
-
-        result = []
-        if len(value) != len(self.contents):
-            result.append(
-                Error('Number of elements {} does not match expected {}'.format(len(value), len(self.contents)))
+            if not isinstance(c, BaseField):
+                raise TypeError((
+                    'Argument {} must be a Conformity '
+                    'field instance, is actually: {!r}'
+                ).format(i, c))
+
+    def validate(self, value: AnyType) -> Validation:
+        v = super().validate(value)
+        if v.errors:
+            return v
+
+        # Validate that value length matches expected length
+        len_value = len(value)
+        len_contents = len(self.contents)
+        if len_value != len_contents:
+            v.errors.append(
+                Error((
+                    'Number of elements {} does '
+                    'not match expected {}'
+                ).format(len_value, len_contents))
             )
 
+        # Validate each element against each field
         for i, (c_elem, v_elem) in enumerate(zip(self.contents, value)):
-            result.extend(
+            v.errors.extend(
                 update_pointer(error, i)
                 for error in (c_elem.errors(v_elem) or [])
            )
-
-        if not result and self.additional_validator:
-            return self.additional_validator.errors(value)
-
-        return result
-
-    def warnings(self, value):
-        # type: (AnyType) -> ListType[Warning]
-        if (
-            not isinstance(value, tuple) or
-            len(value) != len(self.contents)
-        ):
-            return []
-
-        result = []  # type: ListType[Warning]
-        for i, (field, item) in enumerate(zip(self.contents, value)):
-            result.extend(
+            v.warnings.extend(
                 update_pointer(warning, i)
-                for warning in field.warnings(item)
+                for warning in c_elem.warnings(v_elem)
             )
-        return result
+        return v
 
-    def introspect(self):  # type: () -> Introspection
+    def introspect(self) -> Introspection:
        return strip_none({
-            'type': self.introspect_type,
-            'contents': [value.introspect() for value in self.contents],
-            'description': self.description,
-            'additional_validation': (
-                self.additional_validator.__class__.__name__ if self.additional_validator else None
-            ),
-        })
+            'contents': 
[field.introspect() for field in self.contents], + }).update(super().introspect()) From dcd02d2a4c2ffe410e198e4769479b6addf4a27b Mon Sep 17 00:00:00 2001 From: Seth Elliott Date: Thu, 6 Aug 2020 21:41:42 -0500 Subject: [PATCH 07/10] Update the remaining fields. Fix flake8 / mypy issues. Add extend() method to Validation type. --- conformity/error.py | 2 - conformity/fields/__init__.py | 97 +++- conformity/fields/base.py | 77 ++- conformity/fields/country.py | 40 +- conformity/fields/email.py | 22 +- conformity/fields/geo.py | 9 +- conformity/fields/legacy.py | 100 +++- conformity/fields/logging.py | 295 ---------- conformity/fields/meta.py | 864 +++++++++------------------- conformity/fields/net.py | 12 +- conformity/fields/protocols.py | 124 ++-- conformity/fields/python/logging.py | 394 +++++++++++++ conformity/fields/simple.py | 16 +- conformity/fields/structures.py | 118 ++-- conformity/fields/temporal.py | 28 +- conformity/fields/utils.py | 22 +- conformity/settings/__init__.py | 1 - conformity/types.py | 66 ++- 18 files changed, 1127 insertions(+), 1160 deletions(-) delete mode 100644 conformity/fields/logging.py create mode 100644 conformity/fields/python/logging.py diff --git a/conformity/error.py b/conformity/error.py index 38a5f5c..6fdc89a 100644 --- a/conformity/error.py +++ b/conformity/error.py @@ -1,5 +1,3 @@ -from typing import cast - __all__ = ( 'KeywordError', 'PositionalError', diff --git a/conformity/fields/__init__.py b/conformity/fields/__init__.py index 282d645..c5a74a1 100644 --- a/conformity/fields/__init__.py +++ b/conformity/fields/__init__.py @@ -1,47 +1,71 @@ -from conformity.fields.basic import ( - Anything, - Base, - Boolean, - ByteString, - Constant, - Decimal, - Float, - Hashable, - Integer, - UnicodeDecimal, - UnicodeString, +from conformity.fields.base import ( + BaseField, + BaseTypeField, ) from conformity.fields.email import EmailAddress from conformity.fields.geo import ( Latitude, Longitude, ) -from conformity.fields.meta import ( - All, - Any, - BooleanValidator, - ClassConfigurationSchema, - Deprecated, +from conformity.fields.legacy import ( + Base, + ByteString, Null, Nullable, ObjectInstance, - Polymorph, - PythonPath, - TypePath, + SchemalessDictionary, TypeReference, + UnicodeDecimal, + UnicodeString, +) +from conformity.fields.meta import ( + All, + Any, + Anything, + Chain, + Constant, + Instance, + Polymorph, + Type, + Validator, +) +from conformity.fields.modifiers import ( + Deprecated, + Optional, ) from conformity.fields.net import ( IPAddress, IPv4Address, IPv6Address, ) +from conformity.fields.protocols import ( + Callable, + Collection, + Container, + Hashable, + Iterable, + Mapping, + Number, + Sequence, + Set, + Sized, +) +from conformity.fields.python import ( + ClassConfigurationSchema, + PythonPath, + TypePath, +) +from conformity.fields.simple import ( + Boolean, + Bytes, + Decimal, + Float, + Integer, + String, +) from conformity.fields.structures import ( - AdditionalCollectionValidator, Dictionary, List, - SchemalessDictionary, - Sequence, - Set, Tuple, ) from conformity.fields.temporal import ( @@ -52,18 +76,22 @@ TZInfo, ) - __all__ = ( - 'AdditionalCollectionValidator', 'All', 'Any', 'Anything', 'Base', + 'BaseField', + 'BaseTypeField', 'Boolean', - 'BooleanValidator', 'ByteString', + 'Bytes', + 'Callable', + 'Chain', 'ClassConfigurationSchema', + 'Collection', 'Constant', + 'Container', 'Date', 'DateTime', 'Decimal', @@ -72,27 +100,36 @@ 'EmailAddress', 'Float', 'Hashable', - 'Integer', 'IPAddress', 'IPv4Address', 
'IPv6Address', + 'Instance', + 'Integer', + 'Iterable', 'Latitude', 'List', 'Longitude', + 'Mapping', 'Null', 'Nullable', + 'Number', 'ObjectInstance', + 'Optional', 'Polymorph', 'PythonPath', 'SchemalessDictionary', 'Sequence', 'Set', + 'Sized', + 'String', + 'TZInfo', 'Time', 'TimeDelta', 'Tuple', + 'Type', 'TypePath', 'TypeReference', - 'TZInfo', 'UnicodeDecimal', 'UnicodeString', + 'Validator', ) diff --git a/conformity/fields/base.py b/conformity/fields/base.py index 9bd401b..2673e8f 100644 --- a/conformity/fields/base.py +++ b/conformity/fields/base.py @@ -5,9 +5,9 @@ from typing import ( Any, List, - Optional, ) +from conformity.fields.utils import strip_none from conformity.types import ( Error, Warning, @@ -17,34 +17,17 @@ __all__ = ( 'BaseField', + 'BaseTypeField', ) class _BaseMeta(ABCMeta): def __init__(self, name, bases, attrs): # Validate field definition - try: - valid_type = attrs['valid_type'] - except KeyError: - raise ValueError( - 'All concrete BaseField subclasses must ' - 'specify a valid_type class attribute!' - ) - if 'valid_noun' not in attrs: - # Naively set the type "noun" from the type name - attrs['valid_noun'] = 'a {}'.format(valid_type.__name__) + if 'introspect_type' not in attrs: raise ValueError( - 'All concrete BaseField subclasses must ' - 'specify a valid_noun class attribute!' + 'introspect_type must be defined for field {}'.format(name) ) - if 'introspect_type' not in attrs: - if isinstance(valid_type, tuple): - raise ValueError( - 'introspect_type must be defined for field {} ' - 'when valid_type is a tuple' - ) - # If unset, infer the introspection type from the type name - attrs['introspect_type'] = valid_type.__name__ super().__init__(name, bases, attrs) @@ -56,7 +39,7 @@ class BaseField(metaclass=_BaseMeta): implemented by BaseField subclasses. """ - def __init__(self, *, description: str=None) -> None: + def __init__(self, *, description: str = None) -> None: self.description = description def errors(self, value: Any) -> List[Error]: @@ -65,6 +48,7 @@ def errors(self, value: Any) -> List[Error]: def warnings(self, value: Any) -> List[Warning]: return self.validate(value).warnings + @abstractmethod def validate(self, value: Any) -> Validation: """ Interface for field validation. @@ -72,11 +56,6 @@ def validate(self, value: Any) -> Validation: Returns a Validation instance containing errors (if any) and, optionally, a list of warnings and extracted values. """ - errors = [] - if not isinstance(value, self.valid_type): - errors.append(Error('Value is not {}'.format(self.valid_noun))) - return Validation(errors=errors) - def introspect(self) -> Introspection: """ @@ -87,3 +66,47 @@ def introspect(self) -> Introspection: 'introspect_type': self.introspect_type, 'description': self.description, }) + + +class _BaseTypeMeta(_BaseMeta): + def __init__(self, name, bases, attrs): + # Validate field definition + try: + valid_type = attrs['valid_type'] + except KeyError: + raise ValueError( + 'All concrete TypeBaseField subclasses must ' + 'specify a valid_type class attribute!' 
+ ) + if 'valid_noun' not in attrs: + # Naively set the type "noun" from the type name + attrs['valid_noun'] = 'a {}'.format(valid_type.__name__) + if 'introspect_type' not in attrs: + if isinstance(valid_type, tuple): + raise ValueError(( + 'introspect_type must be defined for field {} ' + 'when valid_type is a tuple' + ).format(name)) + # If unset, infer the introspection type from the type name + attrs['introspect_type'] = valid_type.__name__ + + super().__init__(name, bases, attrs) + + +class BaseTypeField(BaseField, metaclass=_BaseTypeMeta): + """ + The base class from which all other typed Conformity fields inherit. + Validates that the value is an instance of `__class__.valid_type`. + """ + + def validate(self, value: Any) -> Validation: + """ + Interface for field validation. + + Returns a Validation instance containing errors (if any) and, + optionally, a list of warnings and extracted values. + """ + errors = [] + if not isinstance(value, self.valid_type): + errors.append(Error('Value is not {}'.format(self.valid_noun))) + return Validation(errors=errors) diff --git a/conformity/fields/country.py b/conformity/fields/country.py index e5e5007..347c18d 100644 --- a/conformity/fields/country.py +++ b/conformity/fields/country.py @@ -1,17 +1,13 @@ from typing import ( - Any as AnyType, + Any, AnyStr, Callable, - List as ListType, ) import pycountry -from conformity.types import ( - Error, - Validation, -) -from conformity.fields.builtin import Constant +from conformity.fields.meta import Constant +from conformity.fields.simple import String _countries_a2 = sorted(c.alpha_2 for c in pycountry.countries) @@ -22,7 +18,7 @@ ) -class CountryCodeField(Constant): +class CountryCodeField(Constant, String): """ Validates that the value is a valid ISO 3166 country code. It permits only current countries according to the installed version of PyCountry and uses @@ -33,20 +29,22 @@ class CountryCodeField(Constant): def __init__( self, - code_filter=lambda x: True, # type: Callable[[AnyStr], bool] - **kwargs # type: AnyType - ): - # type: (...) 
-> None + code_filter: Callable[[AnyStr], bool] = lambda x: True, + **kwargs: Any + ) -> None: """ - :param code_filter: If specified, will be called to further filter the available country codes + :param code_filter: If specified, will be called to further filter the + available country codes """ if not callable(code_filter): - raise TypeError('Argument code_filter must be a callable that accepts a country code and returns a bool') - valid_country_codes = (code for code in _countries_a2 if code_filter(code)) - super(CountryCodeField, self).__init__(*valid_country_codes, **kwargs) + raise TypeError( + 'Argument code_filter must be a callable that accepts a ' + 'country code and returns a bool' + ) + valid_country_codes = ( + code + for code in _countries_a2 + if code_filter(code) + ) + super().__init__(*valid_country_codes, **kwargs) self._error_message = 'Not a valid country code' - - def errors(self, value: AnyType) -> Validation: - if not isinstance(value, str): - return [Error('Not a unicode string')] - return super(CountryCodeField, self).errors(value) diff --git a/conformity/fields/email.py b/conformity/fields/email.py index 777e413..469852b 100644 --- a/conformity/fields/email.py +++ b/conformity/fields/email.py @@ -4,10 +4,13 @@ Iterable, ) -from conformity.fields.builtin import String -from conformity.fields.utils import strip_none from conformity.fields.net import IPAddress -from conformity.types import Error +from conformity.fields.simple import String +from conformity.fields.utils import strip_none +from conformity.types import ( + Error, + Validation, +) from conformity.typing import Introspection __all__ = ( @@ -15,7 +18,7 @@ ) -class EmailAddress(UnicodeString): +class EmailAddress(String): """ Validates that the value is a string that is a valid email address according to RFC 2822 and optionally accepts non-compliant fields listed in the @@ -44,7 +47,12 @@ class EmailAddress(UnicodeString): ) domain_whitelist = frozenset({'localhost'}) - def __init__(self, *, whitelist: Iterable[str]=None, **kwargs) -> None: + def __init__( + self, + *, + whitelist: Iterable[str] = None, + **kwargs: Any + ) -> None: """ Construct a new email address field. @@ -80,7 +88,7 @@ def validate(self, value: Any) -> Validation: errors.append(Error( 'Not a valid email address (invalid local user field)', pointer=user_part, - )] + )) if ( domain_part not in self.domain_whitelist and not self.is_domain_valid(domain_part) @@ -94,7 +102,7 @@ def validate(self, value: Any) -> Validation: errors.append(Error( 'Not a valid email address (invalid domain field)', pointer=domain_part, - )] + )) return Validation(errors=errors) @classmethod diff --git a/conformity/fields/geo.py b/conformity/fields/geo.py index e9a5113..f70816a 100644 --- a/conformity/fields/geo.py +++ b/conformity/fields/geo.py @@ -1,12 +1,17 @@ from conformity.fields.builtin import Float +__all__ = ( + 'Latitude', + 'Longitude', +) + class Latitude(Float): """ Validates that the value is a float within the normal boundaries of a geographical latitude on an ellipsoid or sphere. """ - def __init__(self, *, **kwargs) -> None: + def __init__(self, **kwargs) -> None: kwargs['gte'] = max(kwargs.get('gte', -100), -90) kwargs['lte'] = min(kwargs.get('lte', 100), 90) super().__init__(**kwargs) @@ -18,7 +23,7 @@ class Longitude(Float): geographical longitude on an ellipsoid or sphere. 
""" - def __init__(self, *, **kwargs) -> None: + def __init__(self, **kwargs) -> None: kwargs['gte'] = max(kwargs.get('gte', -190), -180) kwargs['lte'] = min(kwargs.get('lte', 190), 180) super().__init__(**kwargs) diff --git a/conformity/fields/legacy.py b/conformity/fields/legacy.py index c568b0c..2b0b069 100644 --- a/conformity/fields/legacy.py +++ b/conformity/fields/legacy.py @@ -5,17 +5,39 @@ List, ) -from conformity.types import ( - Error, - Warning, - Validation, -) from conformity.fields.base import BaseField +from conformity.fields.meta import ( + Constant, + Instance, + Type, + Validator, +) +from conformity.fields.protocols import Sized from conformity.fields.simple import ( Bytes, Decimal, String, ) +from conformity.fields.structures import Dictionary +from conformity.fields.utils import strip_none +from conformity.types import ( + Error, + Warning, + Validation, +) +from conformity.typing import Introspection + +__all__ = ( + 'Base', + 'ByteString', + 'Null', + 'Nullable', + 'ObjectInstance', + 'SchemalessDictionary', + 'TypeReference', + 'UnicodeDecimal', + 'UnicodeString', +) class Base(BaseField): @@ -44,6 +66,69 @@ def validate(self, value: Any) -> Validation: ) +class Null(Constant): + """ + Legacy field that is shorthand for Constant(None, ...) + """ + def __init__(self, **kwargs): + super().__init__(None, **kwargs) + + +class Nullable(BaseField): + """ + Field that allows a null / `None` value and delegates validation the field + type passed as the first positional argument for all non-null values. + Introspection is a dictionary with "type" set to "nullable" and key + "nullable" set to the introspection of the first positional argument. + """ + + introspect_type = 'nullable' + + def __init__(self, field: BaseField, **kwargs): + super().__init__(**kwargs) + + # Validate arguments + if not isinstance(field, BaseField): + raise TypeError('field argument must be a Conformity field') + + self.field = field + + def validate(self, value: Any) -> Validation: + v = super().validate(value) + + if value is None: + return v + + return self.field.validate(value) + + def introspect(self) -> Introspection: + return strip_none({ + 'nullable': self.field.introspect(), + }).update(super().introspect()) + + +class SchemalessDictionary(Dictionary, Sized): + """ + Validates that the value is a dictionary of any keys and values, but + optionally enforcing that the keys pass the Conformity validation specified + with the `key_type` argument and/or that the values pass the Conformity + validation specified with the `value_type` argument. Size of the dictionary + can also be constrained with the optional `max_length` and `min_length` + arguments. 
+ """ + + introspect_type = 'schemaless_dictionary' + + def __init__( + self, + *, + key_type: BaseField = None, + value_type: BaseField = None, + **kwargs: Any + ) -> None: + super().__init__((key_type, value_type), **kwargs) + + class UnicodeDecimal(String, Decimal): """ Validates that the value is a string that is also a valid decimal and can @@ -55,5 +140,8 @@ class UnicodeDecimal(String, Decimal): # Deprecated Conformity 1.x aliases -UnicodeString = String +BooleanValidator = Validator ByteString = Bytes +ObjectInstance = Instance +TypeReference = Type +UnicodeString = String diff --git a/conformity/fields/logging.py b/conformity/fields/logging.py deleted file mode 100644 index 38b7912..0000000 --- a/conformity/fields/logging.py +++ /dev/null @@ -1,295 +0,0 @@ -from __future__ import ( - absolute_import, - unicode_literals, -) - -import collections -import logging -from typing import ( - Any as AnyType, - Hashable as HashableType, - List as ListType, - Mapping, - Optional, - Tuple as TupleType, -) - -import six - -from conformity import fields -from conformity.constants import ERROR_CODE_UNKNOWN -from conformity.types import Error - - -__all__ = ( - 'PythonLogLevel', - 'PYTHON_LOGGER_SCHEMA', - 'PYTHON_LOGGING_CONFIG_SCHEMA', - 'PYTHON_ROOT_LOGGER_SCHEMA', -) - - -class PythonLogLevel(fields.Constant): - """ - A pre-defined `Constant` field with all the possible Python log levels populated. All you need is a description for - documentation. - """ - - def __init__(self, description=None): # type: (Optional[six.text_type]) -> None - """ - Constructs a `PythonLogLevel` field. - - :param description: The description for documentation - """ - super(PythonLogLevel, self).__init__( - logging.getLevelName(logging.DEBUG), - logging.getLevelName(logging.INFO), - logging.getLevelName(logging.WARNING), - logging.getLevelName(logging.ERROR), - logging.getLevelName(logging.CRITICAL), - description=description, - ) - - -class _LoggingValidator(fields.AdditionalCollectionValidator[Mapping[HashableType, AnyType]]): - @staticmethod - def _ensure_configured( - source, # type: Mapping[str, AnyType] - name, # type: str - errors, # type: ListType[Error] - referencer_noun, # type: str - referencer, # type: str - referenced_noun, # type: str - pointer, # type: str - pointer_args, # type: TupleType[AnyType, ...] - ): - if name not in source: - errors.append(Error( - code=ERROR_CODE_UNKNOWN, - message=( - '{referencer_noun} "{referencer}" references {referenced_noun} "{name}", which is not configured.' 
- ).format( - referencer_noun=referencer_noun, - referencer=referencer, - referenced_noun=referenced_noun, - name=name, - ), - pointer=pointer.format(*pointer_args), - )) - - def errors(self, value): # type: (Mapping[HashableType, AnyType]) -> ListType[Error] - errors = [] # type: ListType[Error] - - formatters = value.get('formatters', {}) # type: Mapping[str, Mapping[str, str]] - filters = value.get('filters', {}) # type: Mapping[str, Mapping[str, AnyType]] - handlers = value.get('handlers', {}) # type: Mapping[str, Mapping[str, AnyType]] - loggers = value.get('loggers', {}) # type: Mapping[str, Mapping[str, AnyType]] - root = value.get('root', {}) # type: Mapping[str, AnyType] - - if filters: - for filter_name, filter_config in filters.items(): - standard_keys = 0 - if '()' in filter_config: - standard_keys = 1 - is_standard = filter_config['()'] == 'logging.Filter' - else: - is_standard = True - if 'name' in filter_config: - standard_keys += 1 - - if is_standard and len(filter_config) > standard_keys: - errors.append(Error( - code=ERROR_CODE_UNKNOWN, - message='Not all keys supported for filter named "{}"'.format(filter_name), - pointer='filters.{}'.format(filter_name), - )) - - if value.get('incremental', False) is not True: - if handlers: - for handler_name, handler_config in handlers.items(): - if 'formatter' in handler_config: - self._ensure_configured( - formatters, handler_config['formatter'], errors, - 'Handler', handler_name, 'formatter', 'handlers.{}.formatter', (handler_name, ), - ) - - handler_filters = handler_config.get('filters', []) # type: ListType[str] - for i, filter in enumerate(handler_filters): - self._ensure_configured( - filters, filter, errors, - 'Handler', handler_name, 'filter', 'handlers.{}.filters.{}', (handler_name, i), - ) - - if loggers: - for logger_name, logger_config in loggers.items(): - logger_filters = logger_config.get('filters', []) # type: ListType[str] - for i, filter in enumerate(logger_filters): - self._ensure_configured( - filters, filter, errors, - 'Logger', logger_name, 'filter', 'loggers.{}.filters.{}', (logger_name, i), - ) - - logger_handlers = logger_config.get('handlers', []) # type: ListType[str] - for i, handler in enumerate(logger_handlers): - self._ensure_configured( - handlers, handler, errors, - 'Logger', logger_name, 'handler', 'loggers.{}.handlers.{}', (logger_name, i), - ) - - if root: - root_filters = root.get('filters', []) # type: ListType[str] - for i, filter in enumerate(root_filters): - self._ensure_configured( - filters, filter, errors, - 'Logger', 'root', 'filter', 'root.filters.{}', (i, ), - ) - - root_handlers = root.get('handlers', []) # type: ListType[str] - for i, handler in enumerate(root_handlers): - self._ensure_configured( - handlers, handler, errors, - 'Logger', 'root', 'handler', 'root.handlers.{}', (i, ), - ) - - return errors - - -PYTHON_ROOT_LOGGER_SCHEMA = fields.Dictionary( - { - 'level': PythonLogLevel( - description='The logging level at or above which this logger will handle logging events and send them to ' - 'its configured handlers.', - ), - 'filters': fields.List( - fields.UnicodeString(), - description='A list of references to keys from `filters` for assigning those filters to this logger.', - ), - 'handlers': fields.List( - fields.UnicodeString(), - description='A list of references to keys from `handlers` for assigning those handlers to this logger.', - ), - }, - optional_keys=('level', 'filters', 'handlers'), -) - -PYTHON_LOGGER_SCHEMA = PYTHON_ROOT_LOGGER_SCHEMA.extend( - contents={ - 
'propagate': fields.Boolean( - description='Whether logging events handled by this logger should propagate to other loggers and/or the ' - 'root logger. Defaults to `True`.' - ), - }, - optional_keys=('propagate', ), -) - - -PYTHON_LOGGING_CONFIG_SCHEMA = fields.Dictionary( - collections.OrderedDict(( - ('version', fields.Integer(gte=1, lte=1)), - ('formatters', fields.SchemalessDictionary( - key_type=fields.UnicodeString(), - value_type=fields.Dictionary( - { - 'format': fields.UnicodeString( - description='The format string for this formatter (see ' - 'https://docs.python.org/3/library/logging.html#logrecord-attributes).', - ), - 'datefmt': fields.UnicodeString( - description='The optional date format used when formatting dates in the log output (see ' - 'https://docs.python.org/3/library/datetime.html#strftime-strptime-behavior).', - ), - }, - optional_keys=('datefmt', ), - ), - description='This defines a mapping of logging formatter names to formatter configurations. The `format` ' - 'key specifies the log format and the `datefmt` key specifies the date format.', - )), - ('filters', fields.SchemalessDictionary( - key_type=fields.UnicodeString(), - value_type=fields.Dictionary( - { - '()': fields.TypePath( - base_classes=logging.Filter, - description='The optional, fully-qualified name of the class extending `logging.Filter`, used ' - 'to override the default class `logging.Filter`.', - ), - 'name': fields.UnicodeString( - description='The optional filter name which will be passed to the `name` argument of the ' - '`logging.Filter` class.', - ), - }, - optional_keys=('()', 'name'), - allow_extra_keys=True, - ), - description='This defines a mapping of logging filter names to filter configurations. If a config has ' - 'only the `name` key, then `logging.Filter` will be instantiated with that argument. You can ' - 'specify a `()` key (yes, really) to override the default `logging.Filter` class with a ' - 'custom filter implementation (which should extend `logging.Filter`). Extra keys are allowed ' - 'only for custom implementations having extra constructor arguments matching those key names.', - )), - ('handlers', fields.SchemalessDictionary( - key_type=fields.UnicodeString(), - value_type=fields.Dictionary( - { - 'class': fields.TypePath( - base_classes=logging.Handler, - description='The fully-qualified name of the class extending `logging.Handler`.', - ), - 'level': PythonLogLevel( - description='The logging level at or above which this handler will emit logging events.', - ), - 'formatter': fields.UnicodeString( - description='A reference to a key from `formatters` for assigning that formatter to this ' - 'handler.', - ), - 'filters': fields.List( - fields.UnicodeString(), - description='A list of references to keys from `filters` for assigning those filters to this ' - 'handler.', - ), - }, - optional_keys=('level', 'formatter', 'filters'), - allow_extra_keys=True, - ), - description='This defines a mapping of logging handler names to handler configurations. The `class` key ' - 'is the importable Python path to the class extending `logging.Handler`. The `level` and ' - '`filters` keys apply to all handlers. The `formatter` key is valid for all handlers, but not ' - 'all handlers will use it. 
Extra keys are allowed only for handlers having extra constructor ' - 'arguments matching those key names.', - )), - ('loggers', fields.SchemalessDictionary( - key_type=fields.UnicodeString(), - value_type=PYTHON_LOGGER_SCHEMA, - description='This defines a mapping of logger names to logger configurations. A log event not handled by ' - 'one of these configured loggers (if any) will instead be handled by the root logger. A log ' - 'event handled by one of these configured loggers may still be handled by another logger or ' - 'the root logger unless its `propagate` key is set to `False`.', - )), - ('root', PYTHON_ROOT_LOGGER_SCHEMA), - ('incremental', fields.Boolean( - description='Whether this configuration should be considered incremental to any existing configuration. ' - 'It defaults to `False` and it is rare that you should ever need to change that.', - )), - ('disable_existing_loggers', fields.Boolean( - description='Whether all existing loggers (objects obtained from `logging.getLogger()`) should be ' - 'disabled when this logging config is loaded. Take our advice and *always* set this to ' - '`False`. It defaults to `True` and you almost never want that, because loggers in ' - 'already-loaded modules will stop working.', - )), - )), - optional_keys=( - 'version', - 'formatters', - 'filters', - 'handlers', - 'root', - 'loggers', - 'incremental', - 'disable_existing_loggers', - ), - description='Settings to enforce the standard Python logging dictionary-based configuration, as you would load ' - 'with `logging.config.dictConfig()`. For more information than the documentation here, see ' - 'https://docs.python.org/3/library/logging.config.html#configuration-dictionary-schema.', - additional_validator=_LoggingValidator(), -) -"""""" # Empty docstring to make autodoc document this data diff --git a/conformity/fields/meta.py b/conformity/fields/meta.py index 7ac88e5..9f51495 100644 --- a/conformity/fields/meta.py +++ b/conformity/fields/meta.py @@ -1,62 +1,44 @@ -from __future__ import ( - absolute_import, - unicode_literals, -) - -import importlib -from types import ModuleType +import abc from typing import ( Any as AnyType, Callable, - Dict, - Hashable as HashableType, - List as ListType, + Hashable, Mapping, - MutableMapping, - Optional, - Tuple as TupleType, - Type, + Tuple, + TypeType, Union, ) -import attr -import six - from conformity.constants import ( - ERROR_CODE_MISSING, ERROR_CODE_UNKNOWN, - WARNING_CODE_FIELD_DEPRECATED, -) -from conformity.error import ValidationError -from conformity.fields.basic import ( - Base, - Introspection, - attr_is_conformity_field, ) -from conformity.fields.structures import ( - Dictionary, - SchemalessDictionary, -) -from conformity.fields.utils import ( - strip_none, - update_pointer, +from conformity.fields.base import ( + BaseField, + BaseTypeField, ) +from conformity.fields.utils import strip_none from conformity.types import ( Error, - Warning, + Validation, ) -from conformity.utils import ( - attr_is_bool, - attr_is_instance, - attr_is_instance_or_instance_tuple, - attr_is_optional, - attr_is_string, +from conformity.typing import Introspection + +__all__ = ( + 'All', + 'Any', + 'Anything', + 'Chain', + 'Constant', + 'Instance', + 'Polymorph', + 'Type', + 'Validator', ) class Anything(BaseField): """ - Validates that the value can be anything. 
+ Validates that the value can be anything """ introspect_type = 'anything' @@ -65,38 +47,43 @@ def validate(self, value: AnyType) -> Validation: return Validation() - - -# TODO: update class Constant(BaseField): """ - Conformity field that ensures that the value exactly matches the constant - value supplied or, if multiple constant values are supplied, exactly matches - one of those values. + Validates that the value exactly matches the constant parameter supplied or, + if multiple constant parameters are supplied, exactly matches one of those + values. """ introspect_type = 'constant' - def __init__(self, *args, **kwargs): # type: (*AnyType, **AnyType) -> None - self.values = frozenset(args) - if not self.values: + def __init__(self, *values: Hashable, **kwargs: AnyType) -> None: + super().__init__(**kwargs) + + # Validate arguments + if not values: raise ValueError('You must provide at least one constant value') - self.description = kwargs.pop(str('description'), None) # type: Optional[six.text_type] - if self.description and not isinstance(self.description, six.text_type): - raise TypeError("'description' must be a unicode string") - # Check they didn't pass any other kwargs - if kwargs: - raise TypeError('Invalid keyword arguments for Constant: {}'.format(kwargs.keys())) + for i, value in enumerate(values): + if not isinstance(value, abc.Hashable): + raise TypeError(( + 'Constant value provided at index ' + '{} is not hashable' + ).format(i)) - def _repr(cv): - return '"{}"'.format(cv) if isinstance(cv, six.string_types) else '{}'.format(cv) + self.values = frozenset(values) + # Build error message + def _repr(cv): + return '"{}"'.format(cv) if isinstance(cv, str) else '{}'.format(cv) if len(self.values) == 1: - self._error_message = 'Value is not {}'.format(_repr(tuple(self.values)[0])) + self._error_message = 'Value is not {}'.format(_repr(values[0])) else: - self._error_message = 'Value is not one of: {}'.format(', '.join(sorted(_repr(v) for v in self.values))) + self._error_message = 'Value is not one of: {}'.format( + ', '.join(sorted(_repr(v) for v in self.values)), + ) + + def validate(self, value: AnyType) -> Validation: + v = super().validate() - def errors(self, value): # type: (AnyType) -> ListType[Error] try: is_valid = value in self.values except TypeError: @@ -104,80 +91,56 @@ def errors(self, value): # type: (AnyType) -> ListType[Error] is_valid = False if not is_valid: - return [Error(self._error_message, code=ERROR_CODE_UNKNOWN)] - return [] + v.errors.append(Error(self._error_message, code=ERROR_CODE_UNKNOWN)) + + return v def introspect(self) -> Introspection: return strip_none({ - 'type': self.introspect_type, 'values': [ - s if isinstance(s, (six.text_type, bool, int, float, type(None))) else six.text_type(s) - for s in sorted(self.values, key=six.text_type) + s + if isinstance(s, (str, bool, int, float, type(None))) + else str(s) + for s in sorted(self.values, key=str) ], - 'description': self.description, - }) - - -@attr.s -class Nullable(Base): - """ - Conformity field that allows a null / `None` value and delegates validation the field type passed as the first - positional argument for all non-null values. Introspection is a dictionary with "type" set to "nullable" and key - "nullable" set to the introspection of the first positional argument. 
- """ - - introspect_type = 'nullable' - - field = attr.ib(validator=attr_is_conformity_field()) # type: Base - - def errors(self, value): # type: (AnyType) -> ListType[Error] - if value is None: - return [] - - return self.field.errors(value) - - def introspect(self): # type: () -> Introspection - return { - 'type': self.introspect_type, - 'nullable': self.field.introspect(), - } - + }).update(super().introspect()) -class Null(Base): - """ - Conformity field that ensures that the value is null / `None`. Useful as a return type, to indicate that a - function returns nothing, for example. - """ - - introspect_type = 'null' - - def errors(self, value): # type: (AnyType) -> ListType[Error] - if value is not None: - return [Error('Value is not null')] - return [] - def introspect(self): # type: () -> Introspection - return { - 'type': self.introspect_type, - } - - -@attr.s -class Polymorph(Base): +class Polymorph(BaseTypeField): """ - A Conformity field which has one of a set of possible contents based on a field within it (which must be - accessible via `Mapping` key lookups). + A special-case Dictionary field which has one of a set of possible contents + based on a field within it (which must be accessible via `Mapping` key + lookups). """ + valid_type = dict + valid_noun = 'a polymorphic dictionary' introspect_type = 'polymorph' - switch_field = attr.ib(validator=attr_is_string()) # type: six.text_type - contents_map = attr.ib(validator=attr_is_instance(dict)) # type: Mapping[HashableType, Base] - description = attr.ib(default=None, validator=attr_is_optional(attr_is_string())) # type: Optional[six.text_type] - - def _get_switch_value(self, value): - # type: (AnyType) -> TupleType[six.text_type, bool] - + def __init__( + self, + *, + switch_field: str, + contents_map: Mapping[Hashable, BaseField], + **kwargs: AnyType + ): + super().__init__(**kwargs) + + # Validate arguments + if not isinstance(switch_field, str): + raise TypeError('switch_field must be a string') + if not isinstance(contents_map, dict): + raise TypeError('contents_map must be a dictionary') + for key, field in contents_map.items(): + if not isinstance(field, BaseField): + raise TypeError( + 'contents_map[{}] must be a Conformity field'.format(key), + ) + + self.switch_field = switch_field + self.contents_map = contents_map + + def _get_switch_value(self, value: AnyType) -> Tuple[str, bool]: # Get switch field value bits = self.switch_field.split('.') switch_value = value @@ -193,554 +156,279 @@ def _get_switch_value(self, value): return switch_value, valid - def errors(self, value): # type: (AnyType) -> ListType[Error] + def validate(self, value: AnyType) -> Validation: + v = super().validate(value) + if v.errors: + return v + switch_value, valid = self._get_switch_value(value) if not valid: - return [Error("Invalid switch value '{}'".format(switch_value), code=ERROR_CODE_UNKNOWN)] + v.errors.append(Error( + 'Invalid switch value "{}"'.format(switch_value), + code=ERROR_CODE_UNKNOWN, + )) + return v - # Get field + # Perform field validation field = self.contents_map[switch_value] - # Run field errors - return field.errors(value) - - def warnings(self, value): - # type: (AnyType) -> ListType[Warning] - switch_value, valid = self._get_switch_value(value) - if valid: - field = self.contents_map[switch_value] - return field.warnings(value) - return [] + return field.validate(value) - def introspect(self): # type: () -> Introspection + def introspect(self) -> Introspection: return strip_none({ - 'type': self.introspect_type, - 
'description': self.description, 'switch_field': self.switch_field, 'contents_map': { key: value.introspect() for key, value in self.contents_map.items() }, - }) + }).update(super().introspect()) -@attr.s -class ObjectInstance(Base): +class Instance(BaseField): """ - Conformity field that ensures that the value is an instance of the given `valid_type`. + Validates that the value is an instance of the given `valid_type` """ - introspect_type = 'object_instance' + introspect_type = 'instance' + + def __init__( + self, + valid_type: Union[TypeType, Tuple[TypeType, ...]], + **kwargs: AnyType + ) -> None: + super().__init__(**kwargs) + if not isinstance(valid_type, type): + raise TypeError('`valid_type` must be a type') + self.valid_type = valid_type - valid_type = attr.ib(validator=attr_is_instance_or_instance_tuple(type)) # type: Union[Type, TupleType[Type, ...]] - description = attr.ib(default=None, validator=attr_is_optional(attr_is_string())) # type: Optional[six.text_type] + def validate(self, value: AnyType) -> Validation: + v = super().validate(value) + if v.errors: + return v - def errors(self, value): # type: (AnyType) -> ListType[Error] if not isinstance(value, self.valid_type): - return [Error('Not an instance of {}'.format(getattr(self.valid_type, '__name__', repr(self.valid_type))))] - return [] + v.errors.append(Error( + 'Value is not an instance of {}'.format(getattr( + self.valid_type, '__name__', repr(self.valid_type) + )) + )) + return v - def introspect(self): # type: () -> Introspection + def introspect(self) -> Introspection: return strip_none({ - 'type': self.introspect_type, - 'description': self.description, - # Unfortunately, this is the one sort of thing we can't represent - # super well. Maybe add some dotted path stuff in here. 'valid_type': repr(self.valid_type), - }) + }).update(super().introspect()) -@attr.s -class PythonPath(Base): +class Type(BaseTypeField): """ - Conformity field that accepts only a unicode path to an importable Python type, function, or variable, including - the full path to the enclosing module. Both '.' and ':' are recognized as valid separators between module name and - item name, but if the item is not a top-level member of the module, it can only be accessed by using ':' as the - separator. - - All of the following are valid type name formats: - - foo.bar.MyClass - foo.bar:MyClass - foo.bar.my_function - foo.bar.MY_CONSTANT - foo.bar:MyClass.MY_CONSTANT - baz.qux:ParentClass.SubClass - - This field performs two validations: First that the path is a unicode string, and second that the item is - importable (exists). If you later need to actually access that item, you can use the `resolve_python_path` static - method. Imported items are cached for faster future lookup. - - You can optionally specify a `value_schema` argument to this field, itself a Conformity field, which will perform - further validation on the value of the imported item. 
+ Validates that the value is an instance of `type` and, optionally, that the + value is a subclass of the type or types specified by `base_classes` """ - introspect_type = 'python_path' - - value_schema = attr.ib(default=None, validator=attr_is_optional(attr_is_conformity_field())) # type: Optional[Base] - description = attr.ib(default=None, validator=attr_is_optional(attr_is_string())) # type: Optional[six.text_type] - - _module_cache = {} # type: Dict[six.text_type, ModuleType] - _import_cache = {} # type: Dict[TupleType[six.text_type, six.text_type], AnyType] - def errors(self, value): # type: (AnyType) -> ListType[Error] - if not isinstance(value, six.text_type): - return [Error('Not a unicode string')] + valid_type = type - try: - thing = self.resolve_python_path(value) - except ValueError: - return [Error('Value "{}" is not a valid Python import path'.format(value))] - except ImportError as e: - return [Error('ImportError: {}'.format(six.text_type(e.args[0])))] - except AttributeError as e: - return [Error('AttributeError: {}'.format(six.text_type(e.args[0])))] - - if self.value_schema: - return self.value_schema.errors(thing) - - return [] - - def introspect(self): # type: () -> Introspection - return strip_none({ - 'type': self.introspect_type, - 'description': self.description, - 'value_schema': self.value_schema.introspect() if self.value_schema else None, - }) - - def warnings(self, value): - # type: (AnyType) -> ListType[Warning] - warnings = super(PythonPath, self).warnings(value) - if self.value_schema: - warnings.extend(self.value_schema.warnings(value)) - return warnings - - @classmethod - def resolve_python_path(cls, type_path): # type: (six.text_type) -> AnyType - if ':' in type_path: - module_name, local_path = type_path.split(':', 1) - else: - module_name, local_path = type_path.rsplit('.', 1) - - cache_key = (module_name, local_path) - if cache_key in cls._import_cache: - return cls._import_cache[cache_key] - - if module_name not in cls._module_cache: - cls._module_cache[module_name] = importlib.import_module(module_name) - - thing = cls._module_cache[module_name] # type: AnyType - for bit in local_path.split('.'): - thing = getattr(thing, bit) - - cls._import_cache[cache_key] = thing - - return thing - - -@attr.s -class TypeReference(Base): - """ - Conformity field that ensures that the value is an instance of `type` and, optionally, that the value is a subclass - of the type or types specified by `base_classes`. 
- """ - introspect_type = 'type_reference' - - base_classes = attr.ib( - default=None, - validator=attr_is_optional(attr_is_instance_or_instance_tuple(type)), - ) # type: Optional[Union[Type, TupleType[Type, ...]]] - description = attr.ib(default=None, validator=attr_is_optional(attr_is_string())) # type: Optional[six.text_type] + def __init__( + self, + *, + base_classes: Union[TypeType, Tuple[TypeType, ...]] = None, + **kwargs: AnyType + ) -> None: + super().__init__(**kwargs) + + # Clean arguments + if base_classes is None: + base_classes = () + elif not isinstance(base_classes, tuple): + base_classes = (base_classes,) + + # Validate bases + for base in base_classes: + if not isinstance(base, type): + raise TypeError('{!r} is not a type'.format(base)) + self.base_classes = base_classes - def errors(self, value): # type: (AnyType) -> ListType[Error] - if not isinstance(value, type): - return [Error('Not a type')] + def validate(self, value: AnyType) -> Validation: + v = super().validate(value) + if v.errors: + return v if self.base_classes and not issubclass(value, self.base_classes): - return [Error('Type {} is not one of or a subclass of one of: {}'.format(value, self.base_classes))] + v.errors.append(Error( + 'Type {} is not one of or a subclass of one of: {}'.format( + value, + self.base_classes, + ), + )) - return [] + return v - def introspect(self): # type: () -> Introspection + def introspect(self) -> Introspection: base_classes = None if self.base_classes: - if isinstance(self.base_classes, type): - base_classes = [six.text_type(self.base_classes)] - else: - base_classes = [six.text_type(c) for c in self.base_classes] + base_classes = [repr(c) for c in self.base_classes] return strip_none({ - 'type': self.introspect_type, - 'description': self.description, 'base_classes': base_classes, - }) - - -class TypePath(PythonPath): - """ - Conformity field that accepts only a unicode path to an importable Python type, including the full path to the - enclosing module. Both '.' and ':' are recognized as a valid separator between module name and type name. - - All of the following are valid type name formats: + }).update(super().introspect()) - foo.bar.MyClass - foo.bar:MyClass - baz.qux:ParentClass.SubClass - This field actually validates that the type is importable, exists, and is a `type`, possibly one that subclasses - one or more `base_classes`. - - This is a special convenience `PythonPath` extension for expecting the imported item to be a type. +class Any(BaseField): """ - def __init__( - self, - base_classes=None, # type: Optional[Union[Type, TupleType[Type, ...]]] - description=None, # type: Optional[six.text_type] - ): - # type: (...) -> None - super(TypePath, self).__init__( - value_schema=TypeReference(base_classes=base_classes), - description=description, - ) - - -@attr.s -class ClassConfigurationSchema(Base): + Validates that the value passes validation with at least one of the + Conformity fields passed as positional arguments """ - A special-case dictionary field that accepts exactly two keys: `path` (a `TypePath`-validated string) and `kwargs` - (a `Dictionary`-or-subclass-validated dict) that can discover initialization schema from classes and validate that - schema prior to instantiation. By default, the dictionary is mutated to add an `object` key containing the resolved - class, but this behavior can be disabled by specifying `add_class_object_to_dict=False` to the field arguments. 
If - you experience circular dependency errors when using this field, you can mitigate this by specifying - `eager_default_validation=False` to the field arguments. - Typical usage would be as follows, in Python pseudocode: - - .. code-block:: python - - class BaseThing: - ... + introspect_type = 'any' - @fields.ClassConfigurationSchema.provider(fields.Dictionary({...}, ...)) - class Thing1(BaseThing): - ... + def __init__(self, *options: BaseField, **kwargs: AnyType) -> None: + super().__init__(**kwargs) - @fields.ClassConfigurationSchema.provider(fields.Dictionary({...}, ...)) - class Thing2(BaseThing): - ... + # Validate fields + for i, field in enumerate(options): + if not isinstance(field, BaseField): + raise TypeError(( + 'Argument {} must be a Conformity field ' + 'instance, is actually: {!r}' + ).format(i, field)) + self.options = options - settings = get_settings_from_something() - schema = fields.ClassConfigurationSchema(base_class=BaseThing) - errors = schema.errors(**settings[kwargs]) - if errors: - ... handle errors ... + def validate(self, value: AnyType) -> Validation: + v = super().validate(value) + if v.errors: + return v - thing = settings['object'](settings) + for field in self.options: + field_v = field.validate(value) + # If there's no errors from a sub-field, then it's all OK! + if not field_v.errors: + return field_v + # Otherwise, add the errors to the overall results + v.extend(field_v) - Another approach, using the helper method on the schema, simplifies that last part: + return v - .. code-block:: python + def introspect(self) -> Introspection: + return strip_none({ + 'options': [option.introspect() for option in self.options], + }).update(super().introspect()) - schema = fields.ClassConfigurationSchema(base_class=BaseThing) - thing = schema.instantiate_from(get_settings_from_something()) # raises ValidationError - However, note that, in both cases, instantiation is not nested. If the settings schema Dictionary on some class has - a key (or further down) whose value is another `ClassConfigurationSchema`, code that consumes those settings will - also have to instantiate objects from those settings. Validation, however, will be nested as in all other things - Conformity. +class All(BaseField): + """ + Validates that the value passes validation with all of the Conformity fields + passed as positional arguments """ - introspect_type = 'class_config_dictionary' - switch_field_schema = TypePath(base_classes=object) - _init_schema_attribute = '_conformity_initialization_schema' - - base_class = attr.ib(default=None, validator=attr_is_optional(attr_is_instance(type))) # type: Optional[Type] - default_path = attr.ib(default=None, validator=attr_is_optional(attr_is_string())) # type: Optional[six.text_type] - description = attr.ib(default=None, validator=attr_is_optional(attr_is_string())) # type: Optional[six.text_type] - eager_default_validation = attr.ib(default=True, validator=attr_is_bool()) # type: bool - add_class_object_to_dict = attr.ib(default=True, validator=attr_is_bool()) # type: bool - - def __attrs_post_init__(self): # type: () -> None - self._schema_cache = {} # type: Dict[six.text_type, Union[Dictionary, SchemalessDictionary]] - - if not self.base_class: - if getattr(self.__class__, 'base_class', None): - # If the base class was defaulted but a subclass has hard-coded a base class, use that. 
- self.base_class = self.__class__.base_class - else: - self.base_class = object - if self.base_class is not object: - # If the base class is not the default, create a new schema instance to validate paths. - self.switch_field_schema = TypePath(base_classes=self.base_class) - else: - self.switch_field_schema = self.__class__.switch_field_schema - - if not self.description and getattr(self.__class__, 'description', None): - # If the description is not specified but a subclass has hard-coded a base class, use that. - self.description = self.__class__.description - - if not self.default_path and getattr(self.__class__, 'default_path', None): - # If the default path is not specified but a subclass has hard-coded a default path, use that. - self.default_path = self.__class__.default_path - if self.default_path and self.eager_default_validation: - # If the default path is specified and eager validation is not disabled, validate the default path. - self.initiate_cache_for(self.default_path) - - def errors(self, value): # type: (AnyType) -> ListType[Error] - if not isinstance(value, Mapping): - return [Error('Not a mapping (dictionary)')] - - # check for extra keys (object is allowed in case this gets validated twice) - extra_keys = [k for k in six.iterkeys(value) if k not in ('path', 'kwargs', 'object')] - if extra_keys: - return [Error( - 'Extra keys present: {}'.format(', '.join(six.text_type(k) for k in sorted(extra_keys))), - code=ERROR_CODE_UNKNOWN, - )] - - sentinel = object() - path = value.get('path', sentinel) - if path is sentinel and not self.default_path: - return [Error('Missing key (and no default specified): path', code=ERROR_CODE_MISSING, pointer='path')] - - if not path or path is sentinel: - path = self.default_path - - errors = self._populate_schema_cache_if_necessary(path) - if errors: - return [update_pointer(e, 'path') for e in errors] - - if isinstance(value, MutableMapping): - value['path'] = path # in case it was defaulted - if self.add_class_object_to_dict: - value['object'] = PythonPath.resolve_python_path(path) - - return [update_pointer(e, 'kwargs') for e in self._schema_cache[path].errors(value.get('kwargs', {}))] - - def initiate_cache_for(self, path): # type: (six.text_type) -> None - errors = self._populate_schema_cache_if_necessary(path) - if errors: - raise ValidationError(errors) - - def _populate_schema_cache_if_necessary(self, path): # type: (six.text_type) -> ListType[Error] - if path in self._schema_cache: - return [] - - errors = self.switch_field_schema.errors(path) - if errors: - return errors - - clazz = PythonPath.resolve_python_path(path) - if not hasattr(clazz, self._init_schema_attribute): - return [Error( - "Neither class '{}' nor one of its superclasses was decorated with " - "@ClassConfigurationSchema.provider".format(path), - )] - - schema = getattr(clazz, self._init_schema_attribute) - if not isinstance(schema, (Dictionary, SchemalessDictionary)): - return [Error( - "Class '{}' attribute '{}' should be a Dictionary or SchemalessDictionary Conformity field or one of " - 'their subclasses'.format(path, self._init_schema_attribute), - )] - - self._schema_cache[path] = schema - return [] + introspect_type = 'all' - def instantiate_from(self, configuration): # type: (MutableMapping[HashableType, AnyType]) -> AnyType - if not isinstance(configuration, MutableMapping): - raise ValidationError([Error('Not a mutable mapping (dictionary)')]) + def __init__(self, *requirements: BaseField, **kwargs: AnyType) -> None: + super().__init__(**kwargs) - errors = 
self.errors(configuration) - if errors: - raise ValidationError(errors) + for i, field in enumerate(requirements): + if not isinstance(field, BaseField): + raise TypeError(( + 'Argument {} must be a Conformity field ' + 'instance, is actually: {!r}' + ).format(i, field)) - clazz = configuration.get('object') - if not clazz: - clazz = PythonPath.resolve_python_path(configuration['path']) + self.requirements = requirements - return clazz(**configuration.get('kwargs', {})) + def validate(self, value: AnyType) -> Validation: + v = Validation() + for field in self.requirements: + v.extend(field.validate(value)) + return v - def introspect(self): # type: () -> Introspection + def introspect(self) -> Introspection: return strip_none({ - 'type': self.introspect_type, - 'description': self.description, - 'base_class': six.text_type(self.base_class.__name__) if self.base_class else None, - 'default_path': self.default_path, - 'switch_field': 'path', - 'switch_field_schema': self.switch_field_schema.introspect(), - 'kwargs_field': 'kwargs', - 'kwargs_contents_map': {k: v.introspect() for k, v in six.iteritems(self._schema_cache)}, - }) - - @staticmethod - def provider(schema): # type: (Union[Dictionary, SchemalessDictionary]) -> Callable[[Type], Type] - if not isinstance(schema, (Dictionary, SchemalessDictionary)): - raise TypeError( - "'schema' must be an instance of a Dictionary or SchemalessDictionary Conformity field or one of " - 'their subclasses', - ) - - def wrapper(cls): # type: (Type) -> Type - if not isinstance(cls, type): - raise TypeError("ClassConfigurationSchema.provider can only decorate classes") - setattr(cls, ClassConfigurationSchema._init_schema_attribute, schema) - return cls + 'requirements': [field.introspect() for field in self.requirements], + }).update(super().introspect()) - return wrapper - -class Any(Base): +class Chain(BaseField): """ - Conformity field that ensures that the value passes validation with at least one of the Conformity fields passed - as positional arguments. + Sequentially validates the value with the Conformity fields passed as + positional arguments. Importantly, validation only continues to the next + field if the current field validates the value without errors. """ - introspect_type = 'any' + introspect_type = 'chain' - description = None # type: Optional[six.text_type] - - def __init__(self, *args, **kwargs): # type: (*Base, **AnyType) -> None - # We can't use attrs here because we need to capture all positional arguments and support keyword arguments - self.options = args - for i, r in enumerate(self.options): - if not isinstance(r, Base): - raise TypeError('Argument {} must be a Conformity field instance, is actually: {!r}'.format(i, r)) - - # We can't put a keyword argument after *args in Python 2, so we need this - self.description = kwargs.pop(str('description'), None) # type: Optional[six.text_type] - if self.description and not isinstance(self.description, six.text_type): - raise TypeError("'description' must be a unicode string") - if kwargs: - raise TypeError('Unknown keyword arguments: {}'.format(', '.join(kwargs.keys()))) - - def errors(self, value): # type: (AnyType) -> ListType[Error] - result = [] # type: ListType[Error] - for option in self.options: - sub_errors = option.errors(value) - # If there's no errors from a sub-field, then it's all OK! 
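
Illustrative usage sketch for the rewritten `All` combinator (not itself part of this diff); unlike `Any`, it aggregates errors from every requirement rather than stopping at the first passing field. `Integer` and `Constant` are assumed from elsewhere in this series:

.. code-block:: python

    from conformity import fields

    # The value must be an integer and one of the allowed constants
    retry_count = fields.All(
        fields.Integer(),
        fields.Constant(0, 1, 2, 3),
        description='Number of retries to attempt',
    )

    print(retry_count.validate(2).errors)    # expected: []
    print(retry_count.validate(9).errors)    # expected: one error, from Constant
    print(retry_count.validate('2').errors)  # expected: errors from both requirements
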
- if not sub_errors: - return [] - # Otherwise, add the errors to the overall results - result.extend(sub_errors) - return result - - def warnings(self, value): - # type: (AnyType) -> ListType[Warning] - result = [] # type: ListType[Warning] - for option in self.options: - sub_errors = option.errors(value) - # If there's no errors from a sub-field, then only return warnings from that field. - if not sub_errors: - return option.warnings(value) - # Otherwise, add the warnings to the overall results - result.extend(option.warnings(value)) - return result - - def introspect(self): # type: () -> Introspection - return strip_none({ - 'type': self.introspect_type, - 'description': self.description, - 'options': [option.introspect() for option in self.options], - }) + def __init__(self, *fields: BaseField, **kwargs: AnyType) -> None: + super().__init__(**kwargs) + for i, field in enumerate(fields): + if not isinstance(field, BaseField): + raise TypeError(( + 'Argument {} must be a Conformity field ' + 'instance, is actually: {!r}' + ).format(i, field)) -class All(Base): - """ - Conformity field that ensures that the value passes validation with at all of the Conformity fields passed as - positional arguments. - """ + self.fields = fields - introspect_type = 'all' + def validate(self, value: AnyType) -> Validation: + v = super().validate(value) + for field in self.fields: + if v.errors: + return v + v.extend(field.validate(value)) + return v - description = None # type: Optional[six.text_type] - - def __init__(self, *args, **kwargs): # type: (*Base, **AnyType) -> None - # We can't use attrs here because we need to capture all positional arguments and support keyword arguments - self.requirements = args - for i, r in enumerate(self.requirements): - if not isinstance(r, Base): - raise TypeError('Argument {} must be a Conformity field instance, is actually: {!r}'.format(i, r)) - - # We can't put a keyword argument after *args in Python 2, so we need this - self.description = kwargs.pop(str('description'), None) # type: Optional[six.text_type] - if self.description and not isinstance(self.description, six.text_type): - raise TypeError("'description' must be a unicode string") - if kwargs: - raise TypeError('Unknown keyword arguments: {}'.format(', '.join(kwargs.keys()))) - - def errors(self, value): # type: (AnyType) -> ListType[Error] - result = [] # type: ListType[Error] - for requirement in self.requirements: - result.extend(requirement.errors(value) or []) - return result - - def warnings(self, value): - # type: (AnyType) -> ListType[Warning] - result = [] # type: ListType[Warning] - for requirement in self.requirements: - result.extend(requirement.warnings(value)) - return result - - def introspect(self): # type: () -> Introspection + def introspect(self) -> Introspection: return strip_none({ - 'type': self.introspect_type, - 'description': self.description, - 'requirements': [requirement.introspect() for requirement in self.requirements], - }) + 'fields': [field.introspect() for field in self.fields], + }).update(super().introspect()) -@attr.s -class BooleanValidator(Base): +class Validator(BaseField): """ - Conformity field that ensures that the value passes validation with the `typing.Callable[[typing.Any], bool]` - `validator` argument passed in to it. 
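
Illustrative usage sketch for `Chain` and `Validator` (not itself part of this diff). It assumes the keyword-only `Validator` signature as it stands after the final patch in this series, and that `String` accepts `min_length` via its `Sized` base:

.. code-block:: python

    from conformity import fields

    # Later fields only run once earlier fields pass, so the Validator below
    # can safely assume it receives a string.
    slug = fields.Chain(
        fields.String(min_length=1),
        fields.Validator(
            lambda s: s == s.lower(),
            validator_description='string must be lower-case',
            error='Value must be lower-case',
        ),
    )

    print(slug.validate('conformity').errors)  # expected: []
    print(slug.validate('Conformity').errors)  # expected: the lower-case error only
    print(slug.validate(42).errors)            # expected: only the String type error
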
+ Validates that the value passes validation with the provided + `typing.Callable[[typing.Any], bool]` `validator` argument """ - introspect_type = 'boolean_validator' + introspect_type = 'validator' - validator = attr.ib() # type: Callable[[AnyType], bool] - validator_description = attr.ib(validator=attr_is_string()) # type: six.text_type - error = attr.ib(validator=attr_is_string()) # type: six.text_type - description = attr.ib(default=None, validator=attr_is_optional(attr_is_string())) # type: Optional[six.text_type] + def __init__( + self, + validator: Callable[[AnyType], bool], + * + validator_description: str, + error: str, + **kwargs + ) -> None: + super().__init__(**kwargs) + + # Validate arguments + if not isinstance(validator, callable): + raise TypeError('validator argument must be a callable') + if not isinstance(validator_description, str): + raise TypeError('validator_description must be a string') + if not isinstance(error, str): + raise TypeError('error must be a string') + + self.validator = validator + self.validator_description = validator_description + self.error = error + + def validate(self, value: AnyType) -> Validation: + v = super().validate(value) + if v.errors: + return v - def errors(self, value): # type: (AnyType) -> ListType[Error] # Run the validator, but catch any errors and return them as an error. try: ok = self.validator(value) except Exception as e: - return [Error('Validator encountered an error (invalid type?): {!r}'.format(e))] + v.errors.append(Error( + 'Validator encountered an error (invalid type?): {!r}'.format(e) + )) + return v - if ok: - return [] - else: - return [Error(self.error)] + if not ok: + v.errors.append(Error(self.error)) + return v - def introspect(self): # type: () -> Introspection + def introspect(self) -> Introspection: return strip_none({ - 'type': self.introspect_type, - 'description': self.description, 'validator': self.validator_description, - }) - - -@attr.s -class Deprecated(Base): - field = attr.ib() # type: Base - message = attr.ib( - default='This field has been deprecated', - validator=attr_is_optional(attr_is_string()), - ) # type: six.text_type - - def warnings(self, value): - # type: (AnyType) -> ListType[Warning] - warnings = self.field.warnings(value) - warnings.append(Warning( - code=WARNING_CODE_FIELD_DEPRECATED, - message=self.message, - )) - return warnings - - def introspect(self): - # type: () -> Introspection - field_introspection = self.field.introspect() - field_introspection['deprecated'] = True - return field_introspection + }).update(super().introspect()) diff --git a/conformity/fields/net.py b/conformity/fields/net.py index bb67f9f..3651858 100644 --- a/conformity/fields/net.py +++ b/conformity/fields/net.py @@ -1,14 +1,18 @@ import re from typing import Any as AnyType -from conformity.fields.builtin import String +from conformity.fields.simple import String from conformity.fields.meta import Any -from conformity.fields.utils import strip_none from conformity.types import ( Error, Validation, ) -from conformity.typing import Introspection + +__all__ = ( + 'IPAddress', + 'IPv4Address', + 'IPv6Address', +) ipv4_regex = re.compile(r'^(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}$') @@ -136,5 +140,5 @@ class IPAddress(Any): valid_noun = 'an IP address' introspect_type = 'ip_address' - def __init__(self, **kwargs: **AnyType) -> None: + def __init__(self, **kwargs: AnyType) -> None: super().__init__(IPv4Address(), IPv6Address(), **kwargs) diff --git a/conformity/fields/protocols.py 
b/conformity/fields/protocols.py index ec3dcb5..283cf66 100644 --- a/conformity/fields/protocols.py +++ b/conformity/fields/protocols.py @@ -1,15 +1,42 @@ from collections import abc import numbers -from typing import Any +from typing import ( + Any, + HashableType, + IterableType, + Tuple, + TypeVar, +) -from conformity.fields.base import BaseField +from conformity.fields.base import ( + BaseField, + BaseTypeField, +) +from conformity.fields.utils import strip_none from conformity.types import ( Error, Validation, ) +from conformity.typing import Introspection + +__all__ = ( + 'Callable', + 'Collection', + 'Container', + 'Hashable', + 'Iterable', + 'Mapping', + 'Number', + 'Sequence', + 'Set', + 'Sized', +) + +T = TypeVar['T'] -class Callable(BaseField): + +class Callable(BaseTypeField): """ Validates that the value is callable """ @@ -18,7 +45,7 @@ class Callable(BaseField): valid_noun = 'callable' -class Container(BaseField): +class Container(BaseTypeField): """ Validates that the value implements the Container protocol (i.e., implements the __conatins__ method) @@ -27,7 +54,7 @@ class Container(BaseField): valid_type = abc.Container -class Hashable(BaseField): +class Hashable(BaseTypeField): """ Validates that the value is hashable (i.e., `hash(...)` can be called on the value without error). @@ -37,7 +64,7 @@ class Hashable(BaseField): valid_noun = 'hashable' -class Iterable(BaseField): +class Iterable(BaseTypeField): """ Validates that the value is iterable """ @@ -46,7 +73,7 @@ class Iterable(BaseField): valid_noun = 'iterable' -class Mapping(BaseField): +class Mapping(BaseTypeField): """ Validates that the value implements the Mapping protocol (e.g. a dictionary) """ @@ -54,7 +81,7 @@ class Mapping(BaseField): valid_type = abc.Mapping -class Number(BaseField): +class Number(BaseTypeField): """ Validates that the value is a Number and, optionally, enforces boundaries for that number with the `gt`, `gte`, `lt`, and `lte` arguments. @@ -65,14 +92,14 @@ class Number(BaseField): def __init__( self, *, - description: str, - allow_boolean: bool=False, - gt: int=None, - gte: int=None, - lt: int=None, - lte: int=None, + allow_boolean: bool = False, + gt: int = None, + gte: int = None, + lt: int = None, + lte: int = None, + **kwargs: Any ): - super().__init__(description) + super().__init__(**kwargs) self.allow_boolean = allow_boolean self.gt = gt self.gte = gte @@ -104,7 +131,7 @@ def introspect(self) -> Introspection: }).update(super().introspect()) -class Sized(BaseField): +class Sized(BaseTypeField): """ Validates that the value implements the Sized protocol (i.e., implements __len__). Optionally, enforces minimum and maximum lengths on sized values. 
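
Illustrative usage sketch for the protocol-based `Number` field and its boundary arguments (not itself part of this diff). The expected results assume `Validation.is_valid()` reports an error-free result as valid, which is how the guard in `Number.validate` reads it:

.. code-block:: python

    from conformity.fields.protocols import Number

    port = Number(gte=1, lte=65535, description='A TCP port number')

    print(port.validate(8080).errors)  # should be []
    print(port.validate(0).errors)     # should contain a boundary error (0 < gte)
    print(port.validate(True).errors)  # booleans are rejected unless allow_boolean=True
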
@@ -116,11 +143,11 @@ class Sized(BaseField): def __init__( self, *, - description: str=None, - min_length: int=None, - max_length: int=None, + min_length: int = None, + max_length: int = None, + **kwargs: Any ): - super().__init__(description=description) + super().__init__(**kwargs) # Validate the length constraints if min_length is not None: @@ -134,7 +161,7 @@ def __init__( def validate(self, value: Any) -> Validation: v = super().validate(value) - if v.is_valid(): + if not v.errors: value_len = len(value) if self.min_length is not None and value_len < self.min_length: v.errors.append( @@ -159,50 +186,47 @@ def introspect(self) -> Introspection: class Collection(Sized): """ - Validates that the value is a collection of items that all pass validation with - the Conformity field passed to the `contents` argument and optionally - establishes boundaries for that list with the `max_length` and - `min_length` arguments. + Validates that the value is a collection of items that all pass validation + with the Conformity field passed to the `contents` argument and optionally + establishes boundaries for that list with the `max_length` and `min_length` + arguments. """ valid_type = abc.Collection - def __init__(self, contents: BaseField, *, **kwargs) -> None: + def __init__( + self, + contents: BaseField, + **kwargs: Any + ) -> None: super().__init__(**kwargs) self.contents = contents - def validate(self, value: AnyType) -> Validation: + def validate(self, value: Any) -> Validation: v = super().validate(value) if not v.errors: - for lazy_pointer, element in self._enumerate(value): - v.errors.extend( - update_pointer(error, lazy_pointer.get()) - for error in (self.contents.errors(element) or []) - ) - v.warnings.extend( - update_pointer(warning, lazy_pointer.get()) - for warning in self.contents.warnings(element) + for p, element in self._enumerate(value): + v.extend( + self.contents.validate(element), + pointer=p, ) return v @classmethod - def _enumerate(cls, values): - # We use a lazy pointer here so that we don't evaluate the pointer for every item that doesn't generate an - # error. We only evaluate the pointer for each item that does generate an error. This is critical in sets, - # where the pointer is the value converted to a string instead of an index. 
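
Illustrative usage sketch for `Collection`-based fields (not itself part of this diff), using the `List` field from the reworked structures module later in this patch. Each element is validated against `contents` and the error pointer is prefixed with the element's position; this assumes the `_update_pointer` call signature fixed by the later "Syntax fixes" patch in this series:

.. code-block:: python

    from conformity import fields

    tags = fields.List(
        fields.String(),
        min_length=1,
        max_length=5,
        description='Between one and five tag strings',
    )

    v = tags.validate(['alpha', 7])
    for error in v.errors:
        # The pointer identifies the offending element, e.g. '1' here
        print(error.pointer, error.message)
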
- return ((cls.LazyPointer(i, value), value) for i, value in enumerate(values)) + def _enumerate( + cls, + values: IterableType[T], + ) -> IterableType[Tuple[HashableType, T]]: + # Overridable value pointer enumeration method + return enumerate(values) def introspect(self) -> Introspection: return strip_none({ 'contents': self.contents.introspect(), }).update(super().introspect()) - class LazyPointer(object): - def __init__(self, index, _): - self.get = lambda: index - class Sequence(Collection): valid_type = abc.Sequence @@ -219,6 +243,12 @@ class Set(Collection): valid_type = abc.Set introspect_type = 'set' - class LazyPointer(object): - def __init__(self, _, value): - self.get = lambda: '[{}]'.format(str(value)) + @classmethod + def _enumerate( + cls, + values: IterableType[T], + ) -> IterableType[Tuple[HashableType, T]]: + return ( + (str(value), value) + for value in values + ) diff --git a/conformity/fields/python/logging.py b/conformity/fields/python/logging.py new file mode 100644 index 0000000..4c85ec6 --- /dev/null +++ b/conformity/fields/python/logging.py @@ -0,0 +1,394 @@ +import collections +import logging +from typing import ( + Any, + List, + Mapping, + Tuple, +) + +from conformity import fields +from conformity.constants import ERROR_CODE_UNKNOWN +from conformity.types import ( + Error, + Validation, +) + + +__all__ = ( + 'PythonLogLevel', + 'PYTHON_LOGGER_SCHEMA', + 'PYTHON_LOGGING_CONFIG_SCHEMA', + 'PYTHON_ROOT_LOGGER_SCHEMA', +) + + +class PythonLogLevel(fields.Constant): + """ + A pre-defined `Constant` field with all the possible Python log levels + populated. All you need is a description for documentation. + """ + + def __init__(self, **kwargs: Any) -> None: + """ + Constructs a `PythonLogLevel` field. + + :param description: The description for documentation + """ + super().__init__( + logging.getLevelName(logging.DEBUG), + logging.getLevelName(logging.INFO), + logging.getLevelName(logging.WARNING), + logging.getLevelName(logging.ERROR), + logging.getLevelName(logging.CRITICAL), + **kwargs + ) + + +class _LoggingValidator(fields.BaseTypeField): + + valid_type = dict + valid_noun = 'a logging.config.dictConfig dictionary' + introspect_type = 'logging.config.dictConfig' + + @staticmethod + def _ensure_configured( + source: Mapping[str, Any], + name: str, + validation: Validation, + referencer_noun: str, + referencer: str, + referenced_noun: str, + pointer: str, + pointer_args: Tuple[Any, ...], + ): + if name not in source: + validation.errors.append(Error( + code=ERROR_CODE_UNKNOWN, + message=( + '{referencer_noun} "{referencer}" references ' + '{referenced_noun} "{name}", which is not configured.' 
+ ).format( + referencer_noun=referencer_noun, + referencer=referencer, + referenced_noun=referenced_noun, + name=name, + ), + pointer=pointer.format(*pointer_args), + )) + + def validate(self, value: Any) -> Validation: + v = super().validate(value) + if v.errors: + return v + + formatters = value.get('formatters', {}) # type: Mapping[str, Mapping[str, str]] + filters = value.get('filters', {}) # type: Mapping[str, Mapping[str, Any]] + handlers = value.get('handlers', {}) # type: Mapping[str, Mapping[str, Any]] + loggers = value.get('loggers', {}) # type: Mapping[str, Mapping[str, Any]] + root = value.get('root', {}) # type: Mapping[str, Any] + + if filters: + for filter_name, filter_config in filters.items(): + standard_keys = 0 + if '()' in filter_config: + standard_keys = 1 + is_standard = filter_config['()'] == 'logging.Filter' + else: + is_standard = True + if 'name' in filter_config: + standard_keys += 1 + + if is_standard and len(filter_config) > standard_keys: + v.errors.append(Error( + code=ERROR_CODE_UNKNOWN, + message=( + 'Not all keys supported for ' + 'filter named "{}"' + ).format(filter_name), + pointer='filters.{}'.format(filter_name), + )) + + if value.get('incremental', False) is not True: + if handlers: + for handler_name, handler_config in handlers.items(): + if 'formatter' in handler_config: + self._ensure_configured( + formatters, + handler_config['formatter'], + v, + 'Handler', + handler_name, + 'formatter', + 'handlers.{}.formatter', + (handler_name, ), + ) + + handler_filters = handler_config.get('filters', []) # type: List[str] + for i, filter in enumerate(handler_filters): + self._ensure_configured( + filters, + filter, + v, + 'Handler', + handler_name, + 'filter', + 'handlers.{}.filters.{}', + (handler_name, i), + ) + + if loggers: + for logger_name, logger_config in loggers.items(): + logger_filters = logger_config.get('filters', []) # type: List[str] + for i, filter in enumerate(logger_filters): + self._ensure_configured( + filters, + filter, + v, + 'Logger', + logger_name, + 'filter', + 'loggers.{}.filters.{}', + (logger_name, i), + ) + + logger_handlers = logger_config.get('handlers', []) # type: List[str] + for i, handler in enumerate(logger_handlers): + self._ensure_configured( + handlers, + handler, + v, + 'Logger', + logger_name, + 'handler', + 'loggers.{}.handlers.{}', + (logger_name, i), + ) + + if root: + root_filters = root.get('filters', []) # type: List[str] + for i, filter in enumerate(root_filters): + self._ensure_configured( + filters, + filter, + v, + 'Logger', + 'root', + 'filter', + 'root.filters.{}', + (i, ), + ) + + root_handlers = root.get('handlers', []) # type: List[str] + for i, handler in enumerate(root_handlers): + self._ensure_configured( + handlers, + handler, + v, + 'Logger', + 'root', + 'handler', + 'root.handlers.{}', + (i, ), + ) + + return v + + +PYTHON_ROOT_LOGGER_SCHEMA = fields.Dictionary( + { + 'level': PythonLogLevel( + description=( + 'The logging level at or above which this logger will handle ' + 'logging events and send them to its configured handlers.' + ), + ), + 'filters': fields.List( + fields.String(), + description=( + 'A list of references to keys from `filters` for assigning ' + 'those filters to this logger.' + ), + ), + 'handlers': fields.List( + fields.String(), + description=( + 'A list of references to keys from `handlers` for assigning ' + 'those handlers to this logger.' 
+ ), + ), + }, + optional_keys=('level', 'filters', 'handlers'), +) + + +PYTHON_LOGGER_SCHEMA = fields.Dictionary( + PYTHON_ROOT_LOGGER_SCHEMA, + { + 'propagate': fields.Boolean( + description=( + 'Whether logging events handled by this logger should ' + 'propagate to other loggers and/or the root logger. Defaults ' + 'to `True`.' + ), + ), + }, + optional_keys=('propagate', ), +) + + +PYTHON_LOGGING_CONFIG_SCHEMA = fields.Chain( + fields.Dictionary(collections.OrderedDict(( + (fields.Optional('version'), fields.Integer(gte=1, lte=1)), + (fields.Optional('formatters'), fields.Dictionary( + ( + fields.String(), + fields.Dictionary( + { + 'format': fields.String( + description=( + 'The format string for this formatter (see ' + 'https://docs.python.org/3/library/logging.html' + '#logrecord-attributes).' + ), + ), + fields.Optional('datefmt'): fields.String( + description=( + 'The optional date format used when formatting ' + 'dates in the log output (see https://docs.' + 'python.org/3/library/datetime.html' + '#strftime-strptime-behavior).' + ), + ), + }, + ), + ), + description=( + 'This defines a mapping of logging formatter names to ' + 'formatter configurations. The `format` key specifies the log ' + 'format and the `datefmt` key specifies the date format.' + ), + )), + (fields.Optional('filters'), fields.Dictionary( + ( + fields.String(), + fields.Dictionary( + { + fields.Optional('()'): fields.TypePath( + base_classes=logging.Filter, + description=( + 'The optional, fully-qualified name of the ' + 'class extending `logging.Filter`, used to ' + 'override the default class `logging.Filter`.' + ), + ), + fields.Optional('name'): fields.String( + description=( + 'The optional filter name which will be passed ' + 'to the `name` argument of the ' + '`logging.Filter` class.' + ), + ), + }, + (fields.Hashable, fields.Any), + ), + ), + description=( + 'This defines a mapping of logging filter names to filter ' + 'configurations. If a config has only the `name` key, then ' + '`logging.Filter` will be instantiated with that argument. You ' + 'can specify a `()` key (yes, really) to override the default ' + '`logging.Filter` class with a custom filter implementation ' + '(which should extend `logging.Filter`). Extra keys are ' + 'allowed only for custom implementations having extra ' + 'constructor arguments matching those key names.' + ), + )), + (fields.Optional('handlers'), fields.Dictionary( + ( + fields.String(), + fields.Dictionary( + { + 'class': fields.TypePath( + base_classes=logging.Handler, + description=( + 'The fully-qualified name of the class ' + 'extending `logging.Handler`.' + ), + ), + fields.Optional('level'): PythonLogLevel( + description=( + 'The logging level at or above which this ' + 'handler will emit logging events.' + ), + ), + fields.Optional('formatter'): fields.String( + description=( + 'A reference to a key from `formatters` for ' + 'assigning that formatter to this handler.' + ), + ), + fields.Optional('filters'): fields.List( + fields.String(), + description=( + 'A list of references to keys from `filters` ' + 'for assigning those filters to this handler.' + ), + ), + }, + (fields.Hashable(), fields.Anything()), + ), + ), + description=( + 'This defines a mapping of logging handler names to handler ' + 'configurations. The `class` key is the importable Python path ' + 'to the class extending `logging.Handler`. The `level` and ' + '`filters` keys apply to all handlers. The `formatter` key is ' + 'valid for all handlers, but not all handlers will use it. 
' + 'Extra keys are allowed only for handlers having extra ' + 'constructor arguments matching those key names.' + ), + )), + (fields.Optional('loggers'), fields.Dictionary( + ( + fields.String(), + PYTHON_LOGGER_SCHEMA, + ), + description=( + 'This defines a mapping of logger names to logger ' + 'configurations. A log event not handled by one of these ' + 'configured loggers (if any) will instead be handled by the ' + 'root logger. A log event handled by one of these configured ' + 'loggers may still be handled by another logger or the root ' + 'logger unless its `propagate` key is set to `False`.' + ), + )), + (fields.Optional('root'), PYTHON_ROOT_LOGGER_SCHEMA), + (fields.Optional('incremental'), fields.Boolean( + description=( + 'Whether this configuration should be considered incremental ' + 'to any existing configuration. It defaults to `False` and it ' + 'is rare that you should ever need to change that.' + ), + )), + (fields.Optional('disable_existing_loggers'), fields.Boolean( + description=( + 'Whether all existing loggers (objects obtained from ' + '`logging.getLogger()`) should be disabled when this logging ' + 'config is loaded. Take our advice and *always* set this to ' + '`False`. It defaults to `True` and you almost never want ' + 'that, because loggers in already-loaded modules will stop ' + 'working.' + ), + )), + ))), + _LoggingValidator(), + description=( + 'Settings to enforce the standard Python logging dictionary-based ' + 'configuration, as you would load with `logging.config.dictConfig()`. ' + 'For more information than the documentation here, see ' + 'https://docs.python.org/3/library/logging.config.html' + '#configuration-dictionary-schema.' + ), +) +"""""" # Empty docstring to make autodoc document this data diff --git a/conformity/fields/simple.py b/conformity/fields/simple.py index 40f180c..9097232 100644 --- a/conformity/fields/simple.py +++ b/conformity/fields/simple.py @@ -1,8 +1,12 @@ import decimal from typing import Any +from conformity.fields.base import BaseTypeField +from conformity.fields.protocols import ( + Number, + Sized, +) from conformity.fields.utils import strip_none -from conformity.fields.base import BaseField from conformity.types import ( Error, Validation, @@ -22,7 +26,7 @@ # # Numeric types # -class Boolean(BaseField): +class Boolean(BaseTypeField): """ Validates that the value is a boolean """ @@ -57,6 +61,7 @@ class Decimal(Number): valid_type = decimal.Decimal + # # String types # @@ -68,7 +73,12 @@ class String(Sized): valid_type = str introspect_type = 'string' - def __init__(self, *, allow_blank: bool=True, **kwargs): + def __init__( + self, + *, + allow_blank: bool = True, + **kwargs: Any + ): super().__init__(**kwargs) self.allow_blank = allow_blank diff --git a/conformity/fields/structures.py b/conformity/fields/structures.py index ba1e921..6bdcf36 100644 --- a/conformity/fields/structures.py +++ b/conformity/fields/structures.py @@ -1,38 +1,29 @@ import abc from typing import ( - Any as AnyType, - Callable, - Container, - Dict, - FrozenSet, - Generic, + Any, Hashable as HashableType, + Iterable, List as ListType, - Mapping, - Optional, - Sequence as SequenceType, - Sized, - Tuple as TupleType, - Type, - TypeVar, - Union, - cast, + Optional as OptionalType, ) from conformity.constants import ( ERROR_CODE_MISSING, ERROR_CODE_UNKNOWN, ) -from conformity.fields.basic import Anything + +from conformity.fields.base import ( + BaseField, + BaseTypeField, +) from conformity.fields.protocols import ( Collection, Hashable, Sized, ) 
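
Illustrative usage sketch for `PYTHON_LOGGING_CONFIG_SCHEMA` defined above (not itself part of this diff). Because the `Dictionary` key handling it relies on is still being reworked at the end of this series, treat the result as the intended behaviour rather than a guarantee:

.. code-block:: python

    from conformity.fields.python.logging import PYTHON_LOGGING_CONFIG_SCHEMA

    config = {
        'version': 1,
        'formatters': {'plain': {'format': '%(levelname)s %(message)s'}},
        'handlers': {
            'console': {
                'class': 'logging.StreamHandler',
                'formatter': 'plain',
            },
        },
        'root': {'level': 'INFO', 'handlers': ['console']},
        'disable_existing_loggers': False,
    }

    v = PYTHON_LOGGING_CONFIG_SCHEMA.validate(config)
    print(v.errors)  # intended: [] -- a handler referencing a missing formatter would be flagged
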
-from conformity.fields.utils import ( - strip_none, - update_pointer, -) +from conformity.fields.meta import Anything +from conformity.fields.modifiers import Optional +from conformity.fields.utils import strip_none from conformity.types import ( Error, Validation, @@ -40,6 +31,12 @@ ) from conformity.typing import Introspection +__all__ = ( + 'Dictionary', + 'List', + 'Tuple', +) + class List(Collection): """ @@ -49,7 +46,7 @@ class List(Collection): valid_type = list -class Dictionary(BaseField): +class Dictionary(BaseTypeField): """ Validates that the value is a dictionary with a specific set of keys and value that validate with the Conformity fields associated with those keys @@ -72,18 +69,17 @@ class Dictionary(BaseField): introspect_type = 'dictionary' # Deprecated class var method - contents = None # type: Optional[AnyContents] - optional_keys = None # type: Optional[bool] + contents = None # type: OptionalType[BaseField] + optional_keys = None # type: OptionalType[bool] allow_extra_keys = False # type: bool # TODO: add __class__.description and __init__ processing? def __init__( self, *contents, - *, - optional_keys: Iterable[HashableType]=None, - allow_extra_keys: bool=False, - **kwargs + optional_keys: Iterable[HashableType] = None, + allow_extra_keys: bool = False, + **kwargs: Any ) -> None: super.__init__(**kwargs) @@ -156,7 +152,7 @@ def __init__( # Add a variable field that accepts anything self._variable_fields.append((Hashable(), Anything())) - def validate(self, value: AnyType) -> Validation: + def validate(self, value: Any) -> Validation: v = super().validate(value) if v.errors: return v @@ -199,7 +195,7 @@ def validate(self, value: AnyType) -> Validation: # else: # # Check key type # result.extend( - # update_pointer(error, key) + # #pdate_pointer(error, key) # for error in (field.errors(value[key]) or []) # ) # # Check for extra keys @@ -212,31 +208,15 @@ def validate(self, value: AnyType) -> Validation: # ), # ) - return result - - def warnings(self, value): - # type: (AnyType) -> ListType[Warning] - if not isinstance(value, dict): - return [] - - result = [] # type: ListType[Warning] - for key, field in self.contents.items(): - if key in value: - result.extend( - update_pointer(warning, key) - for warning in field.warnings(value[key]) - ) - - return result + return v def extend( self, *contents, - * - optional_keys: Iterable[HashableType]=None, - allow_extra_keys: bool=None, - description: str=None, - ) -> Dictionary: + optional_keys: Iterable[HashableType] = None, + allow_extra_keys: bool = None, + description: str = None, + ) -> 'Dictionary': """ Creates a new Dictionary instance that "extends" from this one. @@ -268,29 +248,7 @@ def introspect(self) -> Introspection: }).update(super().introspect()) -class SchemalessDictionary(Dictionary, Sized): - """ - Validates that the value is a dictionary of any keys and values, but - optionally enforcing that the keys pass the Conformity validation specified - with the `key_type` argument and/or that the values pass the Conformity - validation specified with the `value_type` argument. Size of the dictionary - can also be constrained with the optional `max_length` and `min_length` - arguments. 
- """ - - introspect_type = 'schemaless_dictionary' - - def __init__( - self, - *, - key_type: BaseField=None, - value_type: BaseField=None, - **kwargs - ) -> None: - super().__init__((key_type, value_type), **kwargs) - - -class Tuple(BaseField): +class Tuple(BaseTypeField): """ Validates that the value is a tuple with the same number of arguments as the number of positional arguments passed to this field, and that each argument @@ -300,7 +258,7 @@ class Tuple(BaseField): valid_type = tuple - def __init__(self, *contents: Iterable[BaseField], **kwargs) -> None: + def __init__(self, *contents: BaseField, **kwargs: Any) -> None: super().__init__(**kwargs) self.contents = contents @@ -313,7 +271,7 @@ def __init__(self, *contents: Iterable[BaseField], **kwargs) -> None: 'field instance, is actually: {!r}' ).format(i, c)) - def validate(self, value: AnyType) -> Validation: + def validate(self, value: Any) -> Validation: v = super().validate(value) if v.errors: return v @@ -331,13 +289,9 @@ def validate(self, value: AnyType) -> Validation: # Validate each element against each field for i, (c_elem, v_elem) in enumerate(zip(self.contents, value)): - v.errors.extend( - update_pointer(error, i) - for error in (c_elem.errors(v_elem) or []) - ) - v.warnings.extend( - update_pointer(warning, i) - for warning in field.warnings(item) + v.extend( + c_elem.validate(v_elem), + pointer=i, ) return v diff --git a/conformity/fields/temporal.py b/conformity/fields/temporal.py index 6d43ff5..17cbcd0 100644 --- a/conformity/fields/temporal.py +++ b/conformity/fields/temporal.py @@ -5,7 +5,7 @@ TypeVar, ) -from conformity.fields.base import BaseField +from conformity.fields.base import BaseTypeField from conformity.fields.utils import strip_none from conformity.types import ( Error, @@ -13,6 +13,14 @@ ) from conformity.typing import Introspection +__all__ = ( + 'Date', + 'DateTime', + 'Time', + 'TimeDelta', + 'TZInfo', +) + try: # noinspection PyUnresolvedReferences @@ -27,7 +35,7 @@ T = TypeVar('T', datetime.date, datetime.time, datetime.datetime, datetime.timedelta) -class TemporalBase(Generic[T], BaseField): +class TemporalBase(Generic[T], BaseTypeField): """ Common base class for all temporal types. Cannot be used on its own without extension. """ @@ -40,11 +48,11 @@ class TemporalBase(Generic[T], BaseField): def __init__( self, *, - gt: T=None, - gte: T=None, - lt: T=None, - lte: T=None, - **kwargs + gt: T = None, + gte: T = None, + lt: T = None, + lte: T = None, + **kwargs: AnyType ) -> None: super().__init__(**kwargs) self.gt = self.validate_parameter('gt', gt) @@ -54,7 +62,7 @@ def __init__( @classmethod def validate_parameter(cls, name: str, value: T) -> T: - if value is not None and not isinstance(value, self.valid_type): + if value is not None and not isinstance(value, cls.valid_type): raise TypeError(( "'{}' value {!r} cannot be used for " "comparisons in this type" @@ -86,7 +94,7 @@ def introspect(self) -> Introspection: }).update(super().introspect()) -class DateTime(TemporalBase[DATETIME_TYPES]) +class DateTime(TemporalBase[DATETIME_TYPES]): """ Validates that the value is a `datetime.datetime` instance and optionally enforces boundaries for that `datetime` with the `gt`, `gte`, `lt`, and @@ -134,7 +142,7 @@ class TimeDelta(TemporalBase[datetime.timedelta]): introspect_type = 'timedelta' -class TZInfo(BaseField): +class TZInfo(BaseTypeField): """ Validates that the value is a `datetime.tzinfo` instance. 
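
Illustrative usage sketch for the reworked `Dictionary` and `Tuple` fields (not itself part of this diff). Key handling is still in flux in this series, so this sticks to the plain key-to-field mapping form, and assumes `Float`, `Tuple` and `Dictionary` remain re-exported from `conformity.fields`:

.. code-block:: python

    from conformity import fields

    point = fields.Tuple(fields.Float(), fields.Float())

    user = fields.Dictionary(
        {
            'name': fields.String(),
            'location': point,
        },
        optional_keys=('location',),
        description='A user record',
    )

    print(user.validate({'name': 'Ada', 'location': (51.5, -0.1)}).errors)  # intended: []
    print(user.validate({'name': 'Ada', 'extra': True}).errors)             # intended: extra key flagged
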
""" diff --git a/conformity/fields/utils.py b/conformity/fields/utils.py index fd1a33c..8543ad4 100644 --- a/conformity/fields/utils.py +++ b/conformity/fields/utils.py @@ -1,21 +1,15 @@ from typing import ( Dict, - Hashable, TypeVar, ) -from conformity.types import ( - Issue, - Error, - Warning, +__all__ = ( + 'strip_none', ) - KT = TypeVar('KT') VT = TypeVar('VT') -IssueVar = TypeVar('IssueVar', Issue, Error, Warning) - def strip_none(value: Dict[KT, VT]) -> Dict[KT, VT]: """ @@ -24,15 +18,3 @@ def strip_none(value: Dict[KT, VT]) -> Dict[KT, VT]: that might legitimately contain a `None`. """ return {k: v for k, v in value.items() if v is not None} - - -def update_pointer(issue: IssueVar, pointer_or_prefix: Hashable) -> IssueVar: - """ - Helper function to update a pointer attribute with a (potentially prefixed) - dictionary key or list index. - """ - if issue.pointer: - issue.pointer = '{}.{}'.format(pointer_or_prefix, issue.pointer) - else: - issue.pointer = '{}'.format(pointer_or_prefix) - return issue diff --git a/conformity/settings/__init__.py b/conformity/settings/__init__.py index 6840f37..6ed641c 100644 --- a/conformity/settings/__init__.py +++ b/conformity/settings/__init__.py @@ -7,7 +7,6 @@ Iterable, Iterator, KeysView, - List, Mapping, Optional, Tuple, diff --git a/conformity/types.py b/conformity/types.py index 6e560f2..184ef87 100644 --- a/conformity/types.py +++ b/conformity/types.py @@ -1,6 +1,8 @@ from typing import ( + Any, + Hashable, List, - Optional, + TypeVar, ) from conformity.constants import ( @@ -20,8 +22,8 @@ class Issue: """ Represents an issue found during validation of a value. """ - def __init__(self, message: str, pointer: Optional[str]=None) -> None: - selef.message = message + def __init__(self, message: str, pointer: str = None) -> None: + self.message = message self.pointer = pointer @@ -31,11 +33,11 @@ class Error(Issue): """ def __init__( self, - message: str, - pointer: Optional[str]=None, - code: Optional[str]=None, + *, + code: str = None, + **kwargs: Any ): - super().__init__(message, pointer) + super().__init__(**kwargs) self.code = code or ERROR_CODE_INVALID @@ -45,11 +47,11 @@ class Warning(Issue): """ def __init__( self, - message: str, - pointer: Optional[str]=None, - code: Optional[str]=None, + *, + code: str = None, + **kwargs: Any ): - super().__init__(message, pointer) + super().__init__(**kwargs) self.code = code or WARNING_CODE_WARNING @@ -57,14 +59,48 @@ class Validation(object): def __init__( self, *, - errors: Optional[List[Error]]=None, - warnings: Optional[List[Error]]=None, + errors: List[Error] = None, + warnings: List[Warning] = None, ): - self.errors = errors or [] - self.warnings = warnings or [] + self.errors = errors or [] # type: List[Error] + self.warnings = warnings or [] # type: List[Warning] def __bool__(self): return self.is_valid() def is_valid(self): return bool(self.errors) + + def extend( + self, + other: 'Validation', + *, + pointer: Hashable = None, + ) -> None: + if pointer is not None: + self.errors.extend([ + _update_pointer(error) + for error in other.errors + ]) + self.warnings.extend([ + _update_pointer(warning) + for warning in other.warnings + ]) + else: + self.errors.extend(other.errors) + self.warnings.extend(other.warnings) + + +IssueVar = TypeVar('IssueVar', Issue, Error, Warning) + + +def _update_pointer(issue: IssueVar, pointer_or_prefix: Hashable) -> IssueVar: + """ + Helper function to update a pointer attribute with a (potentially prefixed) + dictionary key or list index. 
+ """ + if issue.pointer: + issue.pointer = '{}.{}'.format(pointer_or_prefix, issue.pointer) + else: + issue.pointer = '{}'.format(pointer_or_prefix) + return issue From bce2198759696b5670cd0de7f1d091a5431626d9 Mon Sep 17 00:00:00 2001 From: Seth Elliott Date: Thu, 6 Aug 2020 21:43:44 -0500 Subject: [PATCH 08/10] Move modifier fields. Move python import related fields. Update type aliases. --- conformity/fields/modifiers.py | 88 +++++++ conformity/fields/python/__init__.py | 11 + conformity/fields/python/imports.py | 370 +++++++++++++++++++++++++++ conformity/typing.py | 27 ++ 4 files changed, 496 insertions(+) create mode 100644 conformity/fields/modifiers.py create mode 100644 conformity/fields/python/__init__.py create mode 100644 conformity/fields/python/imports.py create mode 100644 conformity/typing.py diff --git a/conformity/fields/modifiers.py b/conformity/fields/modifiers.py new file mode 100644 index 0000000..6832ac9 --- /dev/null +++ b/conformity/fields/modifiers.py @@ -0,0 +1,88 @@ +from typing import Any + +from conformity.constants import WARNING_CODE_FIELD_DEPRECATED +from conformity.fields.base import BaseField +from conformity.types import ( + Warning, + Validation, +) +from conformity.typing import Introspection + +__all__ = ( + 'Deprecated', + 'Optional', +) + + +class Deprecated(BaseField): + """ + Modifier that marks a field as deprecated + """ + default_message = 'This field has been deprecated' + + def __init__( + self, + field: BaseField, + *, + message: str = None, + **kwargs: Any + ) -> None: + super().__init__(**kwargs) + + # Validate arguments + if not isinstance(field, BaseField): + raise TypeError('field argument must be a Conformity field') + if message is None: + message = self.default_message + elif not isinstance(message, str): + raise TypeError('message argument must be a string') + + self.field = field + self.message = message + + def validate(self, value: Any) -> Validation: + # Pass through validation, then add the deprecation warning + v = self.field.validate(value) + v.warnings.append(Warning( + code=WARNING_CODE_FIELD_DEPRECATED, + message=self.message, + )) + return v + + def introspect(self) -> Introspection: + # Pass through introspection, then add the deprecated field + field_introspection = self.field.introspect() + field_introspection['deprecated'] = True + return field_introspection + + +class Optional(BaseField): + """ + Modifier that marks a field as optional + """ + def __init__( + self, + field: BaseField, + **kwargs: Any + ) -> None: + super().__init__(**kwargs) + + # Validate arguments + if not isinstance(field, BaseField): + raise TypeError('field argument must be a Conformity field') + + self.field = field + + def validate(self, value: Any) -> Validation: + # Pass through validation, then allow None on error + v = self.field.validate(value) + if v.errors and value is None: + v.errors = [] + + return v + + def introspect(self) -> Introspection: + # Pass through introspection, then add the optional field + field_introspection = self.field.introspect() + field_introspection['optional'] = True + return field_introspection diff --git a/conformity/fields/python/__init__.py b/conformity/fields/python/__init__.py new file mode 100644 index 0000000..3f59b1f --- /dev/null +++ b/conformity/fields/python/__init__.py @@ -0,0 +1,11 @@ +from conformity.fields.python.imports import ( + ClassConfigurationSchema, + PythonPath, + TypePath, +) + +__all__ = ( + 'ClassConfigurationSchema', + 'PythonPath', + 'TypePath', +) diff --git 
a/conformity/fields/python/imports.py b/conformity/fields/python/imports.py new file mode 100644 index 0000000..37d1356 --- /dev/null +++ b/conformity/fields/python/imports.py @@ -0,0 +1,370 @@ +import importlib +from types import ModuleType +from typing import ( + Any, + Callable, + Dict, + Hashable as HashableType, + MutableMapping, + Tuple, + Type as TypeType, + Union, +) + +from conformity.constants import ( + ERROR_CODE_MISSING, + ERROR_CODE_UNKNOWN, +) +from conformity.error import ValidationError +from conformity.fields.base import ( + BaseField, + BaseTypeField, +) +from conformity.fields.meta import Type +from conformity.fields.simple import String +from conformity.fields.structures import Dictionary +from conformity.fields.utils import strip_none +from conformity.types import ( + Error, + Validation, +) +from conformity.typing import Introspection + +__all__ = ( + 'PythonPath', + 'TypePath', + 'ClassConfigurationSchema', +) + + +class PythonPath(String): + """ + Validates that a value is a string path to an importable Python type, + function, or variable, including the full path to the enclosing module. Both + '.' and ':' are recognized as valid separators between module name and item + name, but if the item is not a top-level member of the module, it can only + be accessed by using ':' as the separator. + + Examples of valid Python path strings: + + foo.bar.MyClass + foo.bar:MyClass + foo.bar.my_function + foo.bar.MY_CONSTANT + foo.bar:MyClass.MY_CONSTANT + baz.qux:ParentClass.SubClass + + This field performs two validations: + 1. That the path is a unicode string, and + 2. That the item is importable (exists) + + If you later need to actually access that item, you can use the + `resolve_python_path` static method. Imported items are cached for faster + future lookup. + + You can optionally specify a `value_schema` argument to this field, itself a + Conformity field, which will perform further validation on the value of the + imported item. 
+ """ + introspect_type = 'python_path' + + _module_cache = {} # type: Dict[str, ModuleType] + _import_cache = {} # type: Dict[Tuple[str, str], Any] + + def __init__( + self, + *, + value_schema: BaseField = None, + **kwargs: Any + ) -> None: + super().__init__(**kwargs) + + # Validate arguments + if value_schema is not None and not isinstance(value_schema, BaseField): + raise TypeError('value_schema must be a Conformity field') + + self.value_schema = value_schema + + def validate(self, value: Any) -> Validation: + v = super().validate(value) + if v.errors: + return v + + try: + thing = self.resolve_python_path(value) + except ValueError: + v.errors.append(Error( + 'Value "{}" is not a valid Python import path'.format(value) + )) + except ImportError as e: + v.errors.append(Error( + 'ImportError: {}'.format(str(e.args[0])) + )) + except AttributeError as e: + v.errors.append(Error( + 'AttributeError: {}'.format(str(e.args[0])) + )) + else: + if self.value_schema is not None: + v.extend(self.value_schema.errors(thing)) + + return v + + def introspect(self) -> Introspection: + return strip_none({ + 'value_schema': ( + self.value_schema.introspect() + if self.value_schema + else None + ), + }).update(super().introspect()) + + @classmethod + def resolve_python_path(cls, type_path: str) -> Any: + if ':' in type_path: + module_name, local_path = type_path.split(':', 1) + else: + module_name, local_path = type_path.rsplit('.', 1) + + cache_key = (module_name, local_path) + if cache_key in cls._import_cache: + return cls._import_cache[cache_key] + + if module_name not in cls._module_cache: + cls._module_cache[module_name] = importlib.import_module( + module_name, + ) + + thing = cls._module_cache[module_name] # type: Any + for bit in local_path.split('.'): + thing = getattr(thing, bit) + + cls._import_cache[cache_key] = thing + + return thing + + +class TypePath(PythonPath): + """ + A special convenience `PythonPath` extension for expecting the imported + item to be a type + """ + def __init__( + self, + *, + base_classes: Union[TypeType, Tuple[TypeType, ...]] = None, + **kwargs: Any + ): + super().__init__( + value_schema=Type(base_classes=base_classes), + **kwargs + ) + + +class ClassConfigurationSchema(BaseTypeField): + """ + A special-case dictionary field that accepts exactly two keys: + * `path` - a `TypePath`-validated string), and + * `kwargs` - a `Dictionary`-or-subclass-validated dict + + It can discover initialization schema from classes and validate that schema + prior to instantiation. By default, the dictionary is mutated to add an + `object` key containing the resolved class, but this behavior can be + disabled by specifying `add_class_object_to_dict=False` to the field + arguments. If you experience circular dependency errors when using this + field, you can mitigate this by specifying `eager_default_validation=False` + to the field arguments. + + Typical usage would be as follows, in Python pseudocode: + + .. code-block:: python + + class BaseThing: + ... + + @fields.ClassConfigurationSchema.provider(fields.Dictionary({...}, ...)) + class Thing1(BaseThing): + ... + + @fields.ClassConfigurationSchema.provider(fields.Dictionary({...}, ...)) + class Thing2(BaseThing): + ... + + settings = get_settings_from_something() + schema = fields.ClassConfigurationSchema(base_class=BaseThing) + errors = schema.errors(**settings[kwargs]) + if errors: + ... handle errors ... 
+ + thing = settings['object'](settings) + + Another approach, using the helper method on the schema, simplifies that + last part: + + .. code-block:: python + + schema = fields.ClassConfigurationSchema(base_class=BaseThing) + + # the following raises a ValidationError + thing = schema.instantiate_from(get_settings_from_something()) + + However, note that, in both cases, instantiation is not nested. If the + settings schema Dictionary on some class has a key (or further down) whose + value is another `ClassConfigurationSchema`, code that consumes those + settings will also have to instantiate objects from those settings. + Validation, however, will be nested as in all other things Conformity. + """ + valid_type = dict + valid_noun = 'a class configuration dictionary' + introspect_type = 'class_config_dictionary' + + _init_schema_attribute = '_conformity_initialization_schema' + + def __init__( + self, + *, + base_class: type = None, + default_path: str = None, + eager_default_validation: bool = True, + add_class_object_to_dict: bool = True, + **kwargs: Any + ): + super().__init__(**kwargs) + self._schema_cache = {} # type: Dict[str, Dictionary] + + base_class = base_class or getattr(self.__class__, 'base_class', object) + if not isinstance(base_class, type): + raise TypeError('base_class must be a type') + self.switch_field_schema = TypePath(base_classes=base_class) + + self.default_path = ( + default_path or getattr(self.__class__, 'default_path', None) + ) + if self.default_path and eager_default_validation: + self.initiate_cache_for(self.default_path) + self.default_path = default_path + + self.add_class_object_to_dict = add_class_object_to_dict + + def validate(self, value: Any) -> Validation: + v = super().validate(value) + if v.errors: + return v + + # Check for extra keys + # Note: object is allowed in case this gets validated twice + extra_keys = set(value.keys()) - set(('path', 'kwargs', 'object')) + if extra_keys: + v.errors.append(Error( + 'Extra keys present: {}'.format( + ', '.join(str(k) for k in extra_keys), + ), + code=ERROR_CODE_UNKNOWN, + )) + return v + + sentinel = object() + path = value.get('path', sentinel) + if path is sentinel and not self.default_path: + v.errors.append(Error( + 'Missing key (and no default specified): path', + code=ERROR_CODE_MISSING, + pointer='path', + )) + return v + + if not path or path is sentinel: + path = self.default_path + + path_v = self._populate_schema_cache_if_necessary(path) + if path_v.errors: + v.extend(path_v, pointer='path') + return v + + if isinstance(value, MutableMapping): + value['path'] = path + if self.add_class_object_to_dict: + value['object'] = PythonPath.resolve_python_path(path) + + kwargs_v = self._schema_cache[path].validate(value.get('kwargs', {})) + v.extend(kwargs_v, pointer='kwargs') + return v + + def initiate_cache_for(self, path: str) -> None: + v = self._populate_schema_cache_if_necessary(path) + if v.errors: + raise ValidationError(v.errors) + + def _populate_schema_cache_if_necessary(self, path: str) -> Validation: + if path in self._schema_cache: + return Validation() + + v = self.switch_field_schema.validate(path) + if v.errors: + return v + + clazz = PythonPath.resolve_python_path(path) + if not hasattr(clazz, self._init_schema_attribute): + v.errors.append(Error( + 'Neither class "{}" nor one of its superclasses was decorated ' + 'with @ClassConfigurationSchema.provider'.format(path), + )) + return v + + schema = getattr(clazz, self._init_schema_attribute) + if not isinstance(schema, Dictionary): + 
v.errors.append(Error( + 'Class "{}" attribute "{}" should be a Dictionary field or one ' + 'of its subclasses'.format(path, self._init_schema_attribute), + )) + return v + + self._schema_cache[path] = schema + return v + + def instantiate_from( + self, + configuration: MutableMapping[HashableType, Any], + ) -> Any: + if not isinstance(configuration, MutableMapping): + raise ValidationError([Error('Not a mutable mapping (dictionary)')]) + + v = self.validate(configuration) + if v.errors: + raise ValidationError(v.errors) + + clazz = configuration.get('object') + if not clazz: + clazz = PythonPath.resolve_python_path(configuration['path']) + + return clazz(**configuration.get('kwargs', {})) + + def introspect(self) -> Introspection: + return strip_none({ + 'switch_field': 'path', + 'switch_field_schema': self.switch_field_schema.introspect(), + 'kwargs_field': 'kwargs', + 'kwargs_contents_map': { + k: v.introspect() + for k, v in self._schema_cache.items() + }, + }).update(super().introspect()) + + @staticmethod + def provider(schema: Dictionary) -> Callable[[Type], Type]: + if not isinstance(schema, Dictionary): + raise TypeError( + '"schema" must be an instance of a Dictionary field or one of ' + 'its subclasses', + ) + + def wrapper(cls: Type) -> Type: + if not isinstance(cls, type): + raise TypeError( + 'ClassConfigurationSchema.provider can only decorate ' + 'classes' + ) + setattr(cls, ClassConfigurationSchema._init_schema_attribute, schema) + return cls + + return wrapper diff --git a/conformity/typing.py b/conformity/typing.py new file mode 100644 index 0000000..2a97aed --- /dev/null +++ b/conformity/typing.py @@ -0,0 +1,27 @@ +""" +Definitions for common custom type aliases +""" +from typing import ( + Any, + Dict, + List, + Union, +) + +__all__ = ( + 'Introspection', +) + + +# NOTE: The Introspection type alias is intended to represent a JSON +# serializable dictionary. However, as of August 2020, MyPy still does +# not support recursive types. As a result, the type alias is not +# currently strict enough to properly validate that a value is actually +# JSON serializable. 
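
Illustrative usage sketch for the relocated `ClassConfigurationSchema` and its `provider` decorator (not itself part of this diff). It assumes `ClassConfigurationSchema`, `Dictionary` and `Integer` remain re-exported from `conformity.fields`, and whether it round-trips cleanly depends on the in-progress pieces of this series:

.. code-block:: python

    from conformity import fields


    class BaseCache:
        pass


    @fields.ClassConfigurationSchema.provider(fields.Dictionary({
        'max_entries': fields.Integer(gte=1),
    }))
    class LocalCache(BaseCache):
        def __init__(self, max_entries: int = 128) -> None:
            self.max_entries = max_entries


    schema = fields.ClassConfigurationSchema(base_class=BaseCache)
    cache = schema.instantiate_from({
        'path': '__main__:LocalCache',   # assumes this snippet is run as a script
        'kwargs': {'max_entries': 64},
    })
    print(type(cache).__name__, cache.max_entries)  # intended: LocalCache 64
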
+_IntrospectionValue = Union[ + int, float, bool, str, None, + List[Any], + Dict[str, Any], +] + +Introspection = Dict[str, _IntrospectionValue] From 4ec78b1f02b208c53bc009ed7f425c4f81af38d5 Mon Sep 17 00:00:00 2001 From: Seth Elliott Date: Fri, 7 Aug 2020 11:01:56 -0500 Subject: [PATCH 09/10] Syntax fixes --- conformity/fields/geo.py | 2 +- conformity/types.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/conformity/fields/geo.py b/conformity/fields/geo.py index f70816a..19d4b06 100644 --- a/conformity/fields/geo.py +++ b/conformity/fields/geo.py @@ -1,4 +1,4 @@ -from conformity.fields.builtin import Float +from conformity.fields.simple import Float __all__ = ( 'Latitude', diff --git a/conformity/types.py b/conformity/types.py index 184ef87..3644c73 100644 --- a/conformity/types.py +++ b/conformity/types.py @@ -79,11 +79,11 @@ def extend( ) -> None: if pointer is not None: self.errors.extend([ - _update_pointer(error) + _update_pointer(error, pointer) for error in other.errors ]) self.warnings.extend([ - _update_pointer(warning) + _update_pointer(warning, pointer) for warning in other.warnings ]) else: @@ -94,7 +94,7 @@ def extend( IssueVar = TypeVar('IssueVar', Issue, Error, Warning) -def _update_pointer(issue: IssueVar, pointer_or_prefix: Hashable) -> IssueVar: +def _update_pointer( issue: IssueVar, pointer_or_prefix: Hashable) -> IssueVar: """ Helper function to update a pointer attribute with a (potentially prefixed) dictionary key or list index. From 85ff25c31d6c128332b4d27beb64c081040d8172 Mon Sep 17 00:00:00 2001 From: Seth Elliott Date: Mon, 10 Aug 2020 10:59:24 -0500 Subject: [PATCH 10/10] Fix a bunch of flake8/mypy issues. Also, begin reworking Dictionary validation to better handle variable key fields. --- conformity/fields/base.py | 7 ++ conformity/fields/email.py | 3 +- conformity/fields/legacy.py | 13 ++- conformity/fields/meta.py | 48 ++++---- conformity/fields/protocols.py | 38 ++++--- conformity/fields/simple.py | 3 +- conformity/fields/structures.py | 194 +++++++++++++++----------------- conformity/fields/temporal.py | 29 ++--- conformity/types.py | 10 +- 9 files changed, 174 insertions(+), 171 deletions(-) diff --git a/conformity/fields/base.py b/conformity/fields/base.py index 2673e8f..2a29b0d 100644 --- a/conformity/fields/base.py +++ b/conformity/fields/base.py @@ -5,6 +5,8 @@ from typing import ( Any, List, + Optional, + Type, ) from conformity.fields.utils import strip_none @@ -39,6 +41,8 @@ class BaseField(metaclass=_BaseMeta): implemented by BaseField subclasses. """ + introspect_type = None # type: Optional[str] + def __init__(self, *, description: str = None) -> None: self.description = description @@ -99,6 +103,9 @@ class BaseTypeField(BaseField, metaclass=_BaseTypeMeta): Validates that the value is an instance of `__class__.valid_type`. """ + valid_type = None # type: Optional[Type] + valid_noun = None # type: Optional[str] + def validate(self, value: Any) -> Validation: """ Interface for field validation. 
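
Illustrative sketch of how a concrete field overrides the class attributes that this patch declares on `BaseField`/`BaseTypeField` (not itself part of this diff). The exact error message and any extra requirements imposed by the `_BaseMeta`/`_BaseTypeMeta` metaclasses are not visible in this hunk, so treat the expected output as indicative:

.. code-block:: python

    from uuid import UUID

    from conformity.fields.base import BaseTypeField


    class UUIDField(BaseTypeField):
        """Validates that the value is a uuid.UUID instance."""

        valid_type = UUID
        valid_noun = 'a UUID'
        introspect_type = 'uuid'


    v = UUIDField(description='A record identifier').validate('not-a-uuid')
    print([error.message for error in v.errors])  # should report a type error mentioning 'a UUID'
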
diff --git a/conformity/fields/email.py b/conformity/fields/email.py index 469852b..78a9cb7 100644 --- a/conformity/fields/email.py +++ b/conformity/fields/email.py @@ -128,5 +128,6 @@ def introspect(self) -> Introspection: domain_whitelist = sorted(self.domain_whitelist) return strip_none({ + **super().introspect(), 'domain_whitelist': domain_whitelist, - }).update(super().introspect()) + }) diff --git a/conformity/fields/legacy.py b/conformity/fields/legacy.py index 2b0b069..d427656 100644 --- a/conformity/fields/legacy.py +++ b/conformity/fields/legacy.py @@ -7,6 +7,7 @@ from conformity.fields.base import BaseField from conformity.fields.meta import ( + All, Constant, Instance, Type, @@ -71,7 +72,8 @@ class Null(Constant): Legacy field that is shorthand for Constant(None, ...) """ def __init__(self, **kwargs): - super().__init__(None, **kwargs) + # NOTE: Ignoring typing, since Mypy thinks None isn't hashable (it is) + super().__init__(None, **kwargs) # type: ignore class Nullable(BaseField): @@ -103,8 +105,9 @@ def validate(self, value: Any) -> Validation: def introspect(self) -> Introspection: return strip_none({ + **super().introspect(), 'nullable': self.field.introspect(), - }).update(super().introspect()) + }) class SchemalessDictionary(Dictionary, Sized): @@ -129,15 +132,17 @@ def __init__( super().__init__((key_type, value_type), **kwargs) -class UnicodeDecimal(String, Decimal): +class UnicodeDecimal(All): """ Validates that the value is a string that is also a valid decimal and can successfully be converted to a `decimal.Decimal`. """ - valid_noun = 'a unicode decimal' introspect_type = 'unicode_decimal' + def __init__(self, **kwargs): + super().__init__(String(), Decimal(), **kwargs) + # Deprecated Conformity 1.x aliases BooleanValidator = Validator diff --git a/conformity/fields/meta.py b/conformity/fields/meta.py index 9f51495..d5b92fb 100644 --- a/conformity/fields/meta.py +++ b/conformity/fields/meta.py @@ -1,11 +1,11 @@ -import abc +from collections import abc from typing import ( Any as AnyType, Callable, Hashable, Mapping, Tuple, - TypeType, + Type as TypeType, Union, ) @@ -82,7 +82,7 @@ def _repr(cv): ) def validate(self, value: AnyType) -> Validation: - v = super().validate() + v = super().validate(value) try: is_valid = value in self.values @@ -97,13 +97,14 @@ def validate(self, value: AnyType) -> Validation: def introspect(self) -> Introspection: return strip_none({ + **super().introspect(), 'values': [ s if isinstance(s, (str, bool, int, float, type(None))) else str(s) for s in sorted(self.values, key=str) ], - }).update(super().introspect()) + }) class Polymorph(BaseTypeField): @@ -175,12 +176,13 @@ def validate(self, value: AnyType) -> Validation: def introspect(self) -> Introspection: return strip_none({ + **super().introspect(), 'switch_field': self.switch_field, 'contents_map': { key: value.introspect() for key, value in self.contents_map.items() }, - }).update(super().introspect()) + }) class Instance(BaseField): @@ -215,8 +217,9 @@ def validate(self, value: AnyType) -> Validation: def introspect(self) -> Introspection: return strip_none({ + **super().introspect(), 'valid_type': repr(self.valid_type), - }).update(super().introspect()) + }) class Type(BaseTypeField): @@ -268,8 +271,9 @@ def introspect(self) -> Introspection: base_classes = [repr(c) for c in self.base_classes] return strip_none({ + **super().introspect(), 'base_classes': base_classes, - }).update(super().introspect()) + }) class Any(BaseField): @@ -309,8 +313,9 @@ def validate(self, value: 
AnyType) -> Validation: def introspect(self) -> Introspection: return strip_none({ + **super().introspect(), 'options': [option.introspect() for option in self.options], - }).update(super().introspect()) + }) class All(BaseField): @@ -341,8 +346,9 @@ def validate(self, value: AnyType) -> Validation: def introspect(self) -> Introspection: return strip_none({ + **super().introspect(), 'requirements': [field.introspect() for field in self.requirements], - }).update(super().introspect()) + }) class Chain(BaseField): @@ -376,8 +382,9 @@ def validate(self, value: AnyType) -> Validation: def introspect(self) -> Introspection: return strip_none({ + **super().introspect(), 'fields': [field.introspect() for field in self.fields], - }).update(super().introspect()) + }) class Validator(BaseField): @@ -391,25 +398,25 @@ class Validator(BaseField): def __init__( self, validator: Callable[[AnyType], bool], - * + *, validator_description: str, error: str, **kwargs ) -> None: super().__init__(**kwargs) - # Validate arguments - if not isinstance(validator, callable): - raise TypeError('validator argument must be a callable') - if not isinstance(validator_description, str): - raise TypeError('validator_description must be a string') - if not isinstance(error, str): - raise TypeError('error must be a string') - self.validator = validator self.validator_description = validator_description self.error = error + # Validate arguments + if not callable(self.validator): + raise TypeError('validator argument must be callable') + if not isinstance(self.validator_description, str): + raise TypeError('validator_description must be a string') + if not isinstance(self.error, str): + raise TypeError('error must be a string') + def validate(self, value: AnyType) -> Validation: v = super().validate(value) if v.errors: @@ -430,5 +437,6 @@ def validate(self, value: AnyType) -> Validation: def introspect(self) -> Introspection: return strip_none({ + **super().introspect(), 'validator': self.validator_description, - }).update(super().introspect()) + }) diff --git a/conformity/fields/protocols.py b/conformity/fields/protocols.py index 283cf66..07a3741 100644 --- a/conformity/fields/protocols.py +++ b/conformity/fields/protocols.py @@ -2,8 +2,8 @@ import numbers from typing import ( Any, - HashableType, - IterableType, + Hashable as HashableType, + Iterable as IterableType, Tuple, TypeVar, ) @@ -33,16 +33,21 @@ ) -T = TypeVar['T'] +T = TypeVar('T') -class Callable(BaseTypeField): +class Callable(BaseField): """ Validates that the value is callable """ - valid_type = abc.Callable - valid_noun = 'callable' + introspect_type = 'callable' + + def validate(self, value: Any) -> Validation: + v = super().validate(value) + if not v.errors and not callable(value): + v.errors.append(Error('Value is not a callable')) + return v class Container(BaseTypeField): @@ -109,7 +114,9 @@ def __init__( def validate(self, value: Any) -> Validation: v = super().validate(value) if not self.allow_boolean and isinstance(value, bool): - v.errors.append('Value is not {}'.format(self.valid_noun)) + v.errors.append(Error( + 'Value is not {}'.format(self.valid_noun), + )) if v.is_valid(): if self.gt is not None and value <= self.gt: @@ -124,11 +131,12 @@ def validate(self, value: Any) -> Validation: def introspect(self) -> Introspection: return strip_none({ + **super().introspect(), 'gt': self.gt, 'gte': self.gte, 'lt': self.lt, 'lte': self.lte, - }).update(super().introspect()) + }) class Sized(BaseTypeField): @@ -164,24 +172,25 @@ def validate(self, value: 
Any) -> Validation: if not v.errors: value_len = len(value) if self.min_length is not None and value_len < self.min_length: - v.errors.append( + v.errors.append(Error( 'Value must have a length of at least {}'.format( self.min_length, ), - ) + )) elif self.max_length is not None and value_len > self.max_length: - v.errors.append( + v.errors.append(Error( 'Value must have a length of no more than {}'.format( self.max_length, ), - ) + )) return v def introspect(self) -> Introspection: return strip_none({ + **super().introspect(), 'min_length': self.min_length, 'max_length': self.max_length, - }).update(super().introspect()) + }) class Collection(Sized): @@ -224,8 +233,9 @@ def _enumerate( def introspect(self) -> Introspection: return strip_none({ + **super().introspect(), 'contents': self.contents.introspect(), - }).update(super().introspect()) + }) class Sequence(Collection): diff --git a/conformity/fields/simple.py b/conformity/fields/simple.py index 9097232..6cd6628 100644 --- a/conformity/fields/simple.py +++ b/conformity/fields/simple.py @@ -91,8 +91,9 @@ def validate(self, value: Any) -> Validation: def introspect(self) -> Introspection: return strip_none({ + **super().introspect(), 'allow_blank': self.allow_blank, - }).update(super().introspect()) + }) class Bytes(Sized): diff --git a/conformity/fields/structures.py b/conformity/fields/structures.py index 6bdcf36..86202c4 100644 --- a/conformity/fields/structures.py +++ b/conformity/fields/structures.py @@ -1,10 +1,13 @@ import abc from typing import ( Any, + Dict as DictType, Hashable as HashableType, Iterable, List as ListType, Optional as OptionalType, + Tuple as TupleType, + Union, ) from conformity.constants import ( @@ -37,6 +40,11 @@ 'Tuple', ) +# Type aliases +_DictContents = DictType[HashableType, BaseField] +_TupleContents = TupleType[Union[HashableType, BaseField], BaseField] +_Contents = Union[_DictContents, _TupleContents, 'Dictionary'] + class List(Collection): """ @@ -53,166 +61,142 @@ class Dictionary(BaseTypeField): (`contents`). Keys are required unless they are listed in the `optional_keys` argument. No extra keys are allowed unless the `allow_extra_keys` argument is set to `True`. - - If the `contents` argument is an instance of `OrderedDict`, the field - introspection will include a `display_order` list of keys matching the order - they exist in the `OrderedDict`, and errors will be reported in the order - the keys exist in the `OrderedDict`. Order will be maintained for any calls - to `extend` as long as those calls also use `OrderedDict`. Ordering behavior - is undefined otherwise. This field does NOT enforce that the value it - validates presents keys in the same order. `OrderedDict` is used strictly - for documentation and error-object-ordering purposes only. """ valid_type = dict valid_noun = 'a dictionary' introspect_type = 'dictionary' - # Deprecated class var method - contents = None # type: OptionalType[BaseField] - optional_keys = None # type: OptionalType[bool] - allow_extra_keys = False # type: bool - # TODO: add __class__.description and __init__ processing? 
- def __init__( self, - *contents, + *contents: _Contents, optional_keys: Iterable[HashableType] = None, allow_extra_keys: bool = False, **kwargs: Any ) -> None: super.__init__(**kwargs) - if ( - contents is None and - getattr(self.__class__, 'contents', None) is not None - ): - # If no contents were provided but a subclass has hard-coded - # contents, use those - contents = self.__class__.contents - if contents is None: + self.contents = [] # type: ListType[TupleType[BaseField, BaseField]] + self._constant_fields = {} # type: Dict[HashableType, Dict[str, BaseField]] + self._variable_fields = [] # type: ListType[TupleType[BaseField, BaseField]] + + if not contents: # If there are still no contents, raise an error raise ValueError("'contents' is a required argument") # Build complete key/value field list - item_fields = [] + temp_contents = [] # type: ListType[TupleType[Any, Any]] for fields in contents: if isinstance(fields, Dictionary): - fields = fields.contents.items() + # fields is a Dictionary instance, which is already valid + self.contents.extend(fields.contents) + self._constant_fields.update(fields._constant_fields) + self._variable_fields.extend(fields._variable_fields) elif isinstance(fields, dict): - fields = fields.items() - elif not isinstance(fields, abc.Iterable): + temp_contents.extend(fields.items()) + elif isinstance(fields, tuple) and len(fields) == 2: + temp_contents.append(fields) + else: raise TypeError( 'Positional arguments must be either a Dictionary instance, ' - 'a dict instance, or an iterable of (key, value) tuples' + 'a dict instance, or a 2-tuple' ) - item_fields.extend(fields) # Validate optional keys - # TODO: handle __class__.optional_keys if optional_keys is None: optional_keys = () elif not isinstance(optional_keys, abc.Iterable): raise ValueError("'optional_keys' must be an iterable") optional_keys = frozenset(optional_keys) - # Validate each key/value field pair - self._constant_fields = {} - self._variable_fields = [] - for key_field, value_field in item_fields: - # Convert hashable builtin type instances to Literals (i.e., constants) - if isinstance(key_field, LITERAL_TYPES): - key_field = Literal(key_field) - if isinstance(value_field, LITERAL_TYPES): - value_field = Literal(value_field) - - # Validate key/value field types - if not isinstance(key_field, Hashable): - raise ValueError( - 'Dictionary key field must be a Conformity Hashable field' - ) + # Validate and process each key/value field pair + for key_field, value_field in temp_contents: + # Validate fields + if not isinstance(key_field, BaseField): + if isinstance(key_field, Hashable): + # Convert immutable, hashable types to Constant fields + if key_field in optional_keys: + key_field = Optional(key_field) + else: + key_field = Constant(key_field) + else: + raise TypeError( + 'Key field must be a Conformity field or hashable' + ) if not isinstance(value_field, BaseField): - raise ValueError( - 'Dictionary value fields must be a Conformity field' - ) + raise TypeError('Value fields must be Conformity fields') - if isinstance(key_field, Literal): - if key_field.value in optional_keys: - self._variable_fields.append(Optional(key_field), value_field) - else: - self._constant_fields[key_field.value] = value_field + # Sort fields + if isinstance(key_field, Constant): + self._constant_fields[key_field.value] = value_field else: self._variable_fields.append((key_field, value_field)) + self.contents = temp_contents + # Validate allow_extra_keys # TODO: add __class__.allow_extra_keys handling if not 
isinstance(allow_extra_keys, bool): raise TypeError("'allow_extra_keys' must be a boolean") if allow_extra_keys: # Add a variable field that accepts anything - self._variable_fields.append((Hashable(), Anything())) + self._variable_fields.append((Optional(Hashable()), Anything())) def validate(self, value: Any) -> Validation: - v = super().validate(value) + v = super().validate(d_value) if v.errors: + # Not a dict return v # Validate items - for d_key, d_value in value.items(): - if d_key in self._constant_fields: - # Validate constant key field - value_field = self._constant_fields[d_key] - value_v = value_field.validate(d_value) + # NOTE: INCOMPLETE + # TODO: finish this. Particularly, figure out what to do if a dictionary + # item matches multiple required content field pairs. + # + # This is effectively Any(Chain(key_field, value_field), ...) for each + # key/value pair. Should it behave identically? + for key_field, value_field in self.contents: + if isinstance(key_field, Constant): + # See if a constant value is in the dictionary + di_key = None + di_value = None + for c_value in key_field.values: + if c_value in value: + # Found a valid key + di_key = c_value + di_value = value[di_key] + break + + if di_key is not None: + v.extend(key_field.validate(di_key), pointer=di_key) + v.extend(value_field.validate(di_value), pointer=di_key) + else: + # Key not found + if not getattr(key_field, 'optional', False): + # TODO: handle missing required key + pass else: - # Validate variable key field - # TODO: extend warnings - key_valid = False - key_errors = [] - value_valid = False - value_errors = [] - for key_field, value_field in self._variable_fields: - key_v = key_field.validate(d_key) - if key_v.errors: - if not key_valid: - key_errors = [] - else: - key_valid = True - value_v = value_field.validate(d_value) - if value_v.errors: - if not value_valid: - value_errors.extend(value_v.errors) - else: - value_valid = True - - # result = [] - # for key, field in self.contents.items(): - # # Check key is present - # if key not in value: - # if key not in self.optional_keys: - # result.append( - # Error('Missing key: {}'.format(key), code=ERROR_CODE_MISSING, pointer=str(key)), - # ) - # else: - # # Check key type - # result.extend( - # #pdate_pointer(error, key) - # for error in (field.errors(value[key]) or []) - # ) - # # Check for extra keys - # extra_keys = set(value.keys()) - set(self.contents.keys()) - # if extra_keys and not self.allow_extra_keys: - # result.append( - # Error( - # 'Extra keys present: {}'.format(', '.join(str(key) for key in sorted(extra_keys))), - # code=ERROR_CODE_UNKNOWN, - # ), - # ) + # Variable field + # TODO: Record "unknown" key error if key matches no key field + # If only key valid, merge all value validations + # If key/value pair valid, break and merge key and value + # validations for the pair + key_found = False + for di_key, di_value in value.items(): + key_v = key_field.validate(di_key) + if not key_v.errors: + key_found = True + # v.extend(key_v, pointer=di_key) + value_v = value_field.validate(di_value) + if not value_v.errors: + # Found a valid pair + break return v def extend( self, - *contents, + *contents: _Contents, optional_keys: Iterable[HashableType] = None, allow_extra_keys: bool = None, description: str = None, diff --git a/conformity/fields/temporal.py b/conformity/fields/temporal.py index 17cbcd0..b8edf69 100644 --- a/conformity/fields/temporal.py +++ b/conformity/fields/temporal.py @@ -2,6 +2,7 @@ from typing import ( Any as AnyType, Generic, + 
Optional, TypeVar, ) @@ -22,16 +23,6 @@ ) -try: - # noinspection PyUnresolvedReferences - from freezegun import api as _freeze - DATETIME_TYPES = (datetime.datetime, _freeze.FakeDatetime) - DATE_TYPES = (datetime.date, _freeze.FakeDate) -except ImportError: - DATETIME_TYPES = datetime.datetime - DATE_TYPES = datetime.date - - T = TypeVar('T', datetime.date, datetime.time, datetime.datetime, datetime.timedelta) @@ -40,11 +31,6 @@ class TemporalBase(Generic[T], BaseTypeField): Common base class for all temporal types. Cannot be used on its own without extension. """ - # These three must be overridden - valid_type = None - valid_noun = None - introspect_type = None - def __init__( self, *, @@ -61,7 +47,7 @@ def __init__( self.lte = self.validate_parameter('lte', lte) @classmethod - def validate_parameter(cls, name: str, value: T) -> T: + def validate_parameter(cls, name: str, value: Optional[T]) -> Optional[T]: if value is not None and not isinstance(value, cls.valid_type): raise TypeError(( "'{}' value {!r} cannot be used for " @@ -87,33 +73,34 @@ def validate(self, value: AnyType) -> Validation: def introspect(self) -> Introspection: return strip_none({ + **super().introspect(), 'gt': str(self.gt) if self.gt else None, 'gte': str(self.gte) if self.gte else None, 'lt': str(self.lt) if self.lt else None, 'lte': str(self.lte) if self.lte else None, - }).update(super().introspect()) + }) -class DateTime(TemporalBase[DATETIME_TYPES]): +class DateTime(TemporalBase[datetime.datetime]): """ Validates that the value is a `datetime.datetime` instance and optionally enforces boundaries for that `datetime` with the `gt`, `gte`, `lt`, and `lte` arguments, which must also be `datetime` instances if specified. """ - valid_type = DATETIME_TYPES + valid_type = datetime.datetime valid_noun = 'a datetime.datetime' introspect_type = 'datetime' -class Date(TemporalBase[DATE_TYPES]): +class Date(TemporalBase[datetime.date]): """ Validates that the value is a `datetime.date` instance and optionally enforces boundaries for that `date` with the `gt`, `gte`, `lt`, and `lte` arguments, which must also be `date` instances if specified. """ - valid_type = DATE_TYPES + valid_type = datetime.date valid_noun = 'a datetime.date' introspect_type = 'date' diff --git a/conformity/types.py b/conformity/types.py index 3644c73..271ace5 100644 --- a/conformity/types.py +++ b/conformity/types.py @@ -22,7 +22,7 @@ class Issue: """ Represents an issue found during validation of a value. """ - def __init__(self, message: str, pointer: str = None) -> None: + def __init__(self, message: str, *, pointer: str = None) -> None: self.message = message self.pointer = pointer @@ -33,11 +33,11 @@ class Error(Issue): """ def __init__( self, - *, + *args, code: str = None, **kwargs: Any ): - super().__init__(**kwargs) + super().__init__(*args, **kwargs) self.code = code or ERROR_CODE_INVALID @@ -47,11 +47,11 @@ class Warning(Issue): """ def __init__( self, - *, + *args, code: str = None, **kwargs: Any ): - super().__init__(**kwargs) + super().__init__(*args, **kwargs) self.code = code or WARNING_CODE_WARNING
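
With the types.py changes above, the issue message is now accepted positionally while pointer and code remain keyword-only. A small illustration (the 'DEPRECATED' code below is an arbitrary example string, not a predefined constant):

    from conformity.types import Error, Warning

    error = Error('Value is not a dictionary', pointer='payload')  # code defaults to ERROR_CODE_INVALID
    warning = Warning('Key is deprecated', code='DEPRECATED', pointer='old_key')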
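
The Validator field in meta.py, after the argument handling is reordered above, takes the callable positionally and validator_description and error as keyword-only strings. A sketch of the intended call pattern:

    from conformity.fields.meta import Validator

    is_even = Validator(
        lambda value: isinstance(value, int) and value % 2 == 0,
        validator_description='value is an even integer',
        error='Value is not an even integer',
        description='An even integer',
    )
    validation = is_even.validate(3)  # expected to report the configured error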
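
Finally, the reworked Dictionary constructor accepts any mix of dict instances, (key, value) 2-tuples, and other Dictionary instances, promoting literal keys to constant (or optional) key fields. Because the validate() rework is explicitly unfinished in this patch, the sketch below only exercises construction; the Float and String import paths are assumptions based on other files in the series:

    from conformity.fields.simple import Float, String  # assumed locations
    from conformity.fields.structures import Dictionary

    schema = Dictionary(
        # dict form: literal keys become constant key fields
        {'latitude': Float(), 'name': String()},
        # 2-tuple form: a Conformity key field defines a variable key
        (String(), Float()),
        optional_keys=('name',),
        allow_extra_keys=False,
    )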