diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c306821c69..52084de038 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -224,7 +224,7 @@ jobs: CIBW_SKIP: '*-win32' CIBW_PLATFORM: '${{ matrix.platform || matrix.os }}' CIBW_BEFORE_BUILD: 'pip install -U cython' - CIBW_TEST_REQUIRES: 'pytest==5.4.1' + CIBW_TEST_REQUIRES: 'pytest==6.1.1 pytest-mock==3.3.1' CIBW_TEST_COMMAND: 'pytest {project}/tests' CIBW_MANYLINUX_X86_64_IMAGE: 'manylinux2014' CIBW_MANYLINUX_I686_IMAGE: 'manylinux2014' diff --git a/MANIFEST.in b/MANIFEST.in index 5f8d242f0c..6c7fdcaa0d 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,3 +1,6 @@ include LICENSE include README.md include HISTORY.md +graft tests +global-exclude __pycache__ +global-exclude *.py[cod] diff --git a/changes/1352-brianmaissy.md b/changes/1352-brianmaissy.md new file mode 100644 index 0000000000..f87b1cd4ae --- /dev/null +++ b/changes/1352-brianmaissy.md @@ -0,0 +1 @@ +Add a `__call__` stub to `PyObject` so that mypy will know that it is callable diff --git a/changes/1736-PrettyWood.md b/changes/1736-PrettyWood.md new file mode 100644 index 0000000000..64a22abfc9 --- /dev/null +++ b/changes/1736-PrettyWood.md @@ -0,0 +1 @@ +Fix behaviour with forward refs and optional fields in nested models \ No newline at end of file diff --git a/changes/1767-PrettyWood.md b/changes/1767-PrettyWood.md new file mode 100644 index 0000000000..7262d7eca7 --- /dev/null +++ b/changes/1767-PrettyWood.md @@ -0,0 +1 @@ +add basic support of Pattern type in schema generation diff --git a/changes/1770-selimb.md b/changes/1770-selimb.md new file mode 100644 index 0000000000..bfd7284dfc --- /dev/null +++ b/changes/1770-selimb.md @@ -0,0 +1 @@ +Fix false positive from mypy plugin when a class nested within a `BaseModel` is named `Model`. 
diff --git a/changes/1794-mdavis-xyz.md b/changes/1794-mdavis-xyz.md new file mode 100644 index 0000000000..d4218eae22 --- /dev/null +++ b/changes/1794-mdavis-xyz.md @@ -0,0 +1,2 @@ +Clarify documentation for `parse_file` to show that the argument +should be a file *path* not a file-like object. diff --git a/changes/1803-PrettyWood.md b/changes/1803-PrettyWood.md new file mode 100644 index 0000000000..f7671cb7a1 --- /dev/null +++ b/changes/1803-PrettyWood.md @@ -0,0 +1 @@ +Support home directory relative paths for `dotenv` files (e.g. `~/.env`). \ No newline at end of file diff --git a/changes/1812-PrettyWood.md b/changes/1812-PrettyWood.md new file mode 100644 index 0000000000..dd0f3aa13f --- /dev/null +++ b/changes/1812-PrettyWood.md @@ -0,0 +1 @@ +add `parse_raw_as` utility function \ No newline at end of file diff --git a/changes/1821-KimMachineGun.md b/changes/1821-KimMachineGun.md new file mode 100644 index 0000000000..418c92e7ce --- /dev/null +++ b/changes/1821-KimMachineGun.md @@ -0,0 +1 @@ +Fix typo in the anchor of exporting_models.md#modelcopy and incorrect description. \ No newline at end of file diff --git a/changes/1957-hmvp.md b/changes/1957-hmvp.md new file mode 100644 index 0000000000..7dad0859d9 --- /dev/null +++ b/changes/1957-hmvp.md @@ -0,0 +1 @@ +Fix const validators not running when custom validators are present diff --git a/changes/1976-sbraz.md b/changes/1976-sbraz.md new file mode 100644 index 0000000000..085149fb56 --- /dev/null +++ b/changes/1976-sbraz.md @@ -0,0 +1 @@ +Include tests in source distributions. 
diff --git a/changes/1981-daviskirk.md b/changes/1981-daviskirk.md new file mode 100644 index 0000000000..ef52cb123b --- /dev/null +++ b/changes/1981-daviskirk.md @@ -0,0 +1 @@ +Force `fields.Undefined` to be a singleton object, fixing inherited generic model schemas diff --git a/docs/build/exec_examples.py b/docs/build/exec_examples.py index 2f167f5278..396c424d71 100755 --- a/docs/build/exec_examples.py +++ b/docs/build/exec_examples.py @@ -207,6 +207,13 @@ def error(desc: str): else: lines = lines[ignore_above + 1 :] + try: + ignore_below = lines.index('# ignore-below') + except ValueError: + pass + else: + lines = lines[:ignore_below] + lines = '\n'.join(lines).split('\n') if any(len(l) > MAX_LINE_LENGTH for l in lines): error(f'lines longer than {MAX_LINE_LENGTH} characters') diff --git a/docs/build/schema_mapping.py b/docs/build/schema_mapping.py index 0972c4b9a3..f9731c0a00 100755 --- a/docs/build/schema_mapping.py +++ b/docs/build/schema_mapping.py @@ -145,6 +145,13 @@ 'JSON Schema Validation', '' ], + [ + 'Pattern', + 'string', + {'format': 'regex'}, + 'JSON Schema Validation', + '' + ], [ 'bytes', 'string', diff --git a/docs/examples/models_parse.py b/docs/examples/models_parse.py index 5cdcf554fa..004ca08d65 100644 --- a/docs/examples/models_parse.py +++ b/docs/examples/models_parse.py @@ -1,5 +1,7 @@ import pickle from datetime import datetime +from pathlib import Path + from pydantic import BaseModel, ValidationError @@ -30,3 +32,11 @@ class User(BaseModel): pickle_data, content_type='application/pickle', allow_pickle=True ) print(m) + +path = Path('data.json') +path.write_text('{"id": 123, "name": "James"}') +m = User.parse_file(path) +print(m) +# ignore-below +if path.exists(): + path.unlink() diff --git a/docs/index.md b/docs/index.md index debca0ec19..50234d33c1 100644 --- a/docs/index.md +++ b/docs/index.md @@ -55,12 +55,16 @@ What's going on here: * `signup_ts` は datetime フィールドです (指定されていない場合は `None` 値を取ります)。 *pydantic* は Unix タイムスタンプ (例: 
`1496498400`) または日付と時刻を表す文字列のいずれかを処理します。 * `friends` は Python の typing を使用しており、入力のリストが必要です。`id` のように、 diff --git a/docs/mypy_plugin.md b/docs/mypy_plugin.md index 444d66dfd1..62b449d15b 100644 --- a/docs/mypy_plugin.md +++ b/docs/mypy_plugin.md @@ -151,12 +151,15 @@ There are other benefits too! See below for more details. --> * `@pydantic.dataclasess.dataclass` デコレータは `Config` サブクラスと同じ意味を持つ `config` キーワード引数を受け入れます。 ### オプション機能 #### 必須の動的エイリアスの使用を防止 @@ -294,7 +297,6 @@ A `mypy.ini` file with all plugin strictness flags enabled (and some other mypy plugins = pydantic.mypy follow_imports = silent -strict_optional = True warn_redundant_casts = True warn_unused_ignores = True disallow_any_generics = True diff --git a/docs/requirements.txt b/docs/requirements.txt index b7ed3b193b..91b6322f16 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,10 +1,10 @@ ansi2html==1.5.2 mkdocs==1.1.2 -markdown==3.2.2 +markdown==3.3 mkdocs-exclude==1.0.2 -mkdocs-material==5.5.12 +mkdocs-material==6.0.2 markdown-include==0.6.0 -pygments==2.6.1 +pygments==2.7.1 sqlalchemy # pyup: ignore orjson # pyup: ignore ujson # pyup: ignore diff --git a/docs/usage/models.md b/docs/usage/models.md index 3365a181fc..69c07c56e1 100644 --- a/docs/usage/models.md +++ b/docs/usage/models.md @@ -71,7 +71,7 @@ Models possess the following methods and attributes: cf. [exporting models](exporting_models.md#modeljson) `copy()` -: returns a deep copy of the model; cf. [exporting models](exporting_models.md#modeldcopy) +: returns a copy (by default, shallow copy) of the model; cf. [exporting models](exporting_models.md#modelcopy) `parse_obj()` : a utility for loading any object into a model with error handling if the object is not a dictionary; @@ -81,7 +81,7 @@ Models possess the following methods and attributes: : a utility for loading strings of numerous formats; cf. [helper functions](#helper-functions) `parse_file()` -: like `parse_raw()` but for files; cf. 
[helper function](#helper-functions) +: like `parse_raw()` but for file paths; cf. [helper function](#helper-functions) `from_orm()` : loads data into a model from an arbitrary class; cf. [ORM mode](#orm-mode-aka-arbitrary-class-instances) @@ -238,7 +238,7 @@ _(This script is complete, it should run "as is")_ rather than keyword arguments. If the object passed is not a dict a `ValidationError` will be raised. * **`parse_raw`**: this takes a *str* or *bytes* and parses it as *json*, then passes the result to `parse_obj`. Parsing *pickle* data is also supported by setting the `content_type` argument appropriately. -* **`parse_file`**: this reads a file and passes the contents to `parse_raw`. If `content_type` is omitted, +* **`parse_file`**: this takes in a file path, reads the file and passes the contents to `parse_raw`. If `content_type` is omitted, it is inferred from the file's extension. ```py @@ -533,8 +533,8 @@ _(This script is complete, it should run "as is")_ This function is capable of parsing data into any of the types pydantic can handle as fields of a `BaseModel`. -Pydantic also includes a similar standalone function called `parse_file_as`, -which is analogous to `BaseModel.parse_file`. +Pydantic also includes two similar standalone functions called `parse_file_as` and `parse_raw_as`, +which are analogous to `BaseModel.parse_file` and `BaseModel.parse_raw`. ## Data Conversion diff --git a/docs/usage/validators.md b/docs/usage/validators.md index 4db5fef4d6..339238bd7c 100644 --- a/docs/usage/validators.md +++ b/docs/usage/validators.md @@ -53,7 +53,7 @@ A few more things to note: ## Subclass Validators and `each_item` If using a validator with a subclass that references a `List` type field on a parent class, using `each_item=True` will -cause the validator not to run; instead, the list must be iterated over programatically. +cause the validator not to run; instead, the list must be iterated over programmatically. 
```py {!.tmp_examples/validators_subclass_each_item.py!} diff --git a/pydantic/__init__.py b/pydantic/__init__.py index 53f4f4f858..1a3cabb8c0 100644 --- a/pydantic/__init__.py +++ b/pydantic/__init__.py @@ -56,6 +56,7 @@ # tools 'parse_file_as', 'parse_obj_as', + 'parse_raw_as', # types 'NoneStr', 'NoneBytes', diff --git a/pydantic/env_settings.py b/pydantic/env_settings.py index 59ed44b0de..6846e10209 100644 --- a/pydantic/env_settings.py +++ b/pydantic/env_settings.py @@ -58,7 +58,7 @@ def _build_environ( env_file = _env_file if _env_file != env_file_sentinel else self.__config__.env_file env_file_encoding = _env_file_encoding if _env_file_encoding is not None else self.__config__.env_file_encoding if env_file is not None: - env_path = Path(env_file) + env_path = Path(env_file).expanduser() if env_path.is_file(): env_vars = { **read_env_file( diff --git a/pydantic/fields.py b/pydantic/fields.py index 01792b628b..2619b4b167 100644 --- a/pydantic/fields.py +++ b/pydantic/fields.py @@ -1,6 +1,5 @@ import warnings from collections.abc import Iterable as CollectionsIterable -from copy import deepcopy from typing import ( TYPE_CHECKING, Any, @@ -36,16 +35,24 @@ is_new_type, new_type_supertype, ) -from .utils import PyObjectStr, Representation, lenient_issubclass, sequence_like +from .utils import PyObjectStr, Representation, lenient_issubclass, sequence_like, smart_deepcopy from .validators import constant_validator, dict_validator, find_validators, validate_json Required: Any = Ellipsis +T = TypeVar('T') + class UndefinedType: def __repr__(self) -> str: return 'PydanticUndefined' + def __copy__(self: T) -> T: + return self + + def __deepcopy__(self: T, _: Any) -> T: + return self + Undefined = UndefinedType() @@ -129,7 +136,7 @@ def Field( **extra: Any, ) -> Any: """ - Used to provide extra information about a field, either for the model schema or complex valiation. 
Some arguments + Used to provide extra information about a field, either for the model schema or complex validation. Some arguments apply only to number fields (``int``, ``float``, ``Decimal``) and some apply only to ``str``. :param default: since this is replacing the field’s default, its first argument is used @@ -271,14 +278,7 @@ def __init__( self.prepare() def get_default(self) -> Any: - if self.default_factory is not None: - value = self.default_factory() - elif self.default is None: - # deepcopy is quite slow on None - value = None - else: - value = deepcopy(self.default) - return value + return smart_deepcopy(self.default) if self.default_factory is None else self.default_factory() @classmethod def infer( @@ -342,6 +342,11 @@ def prepare(self) -> None: """ self._set_default_and_type() + if self.type_.__class__ == ForwardRef: + # self.type_ is currently a ForwardRef and there's nothing we can do now, + # user will need to call model.update_forward_refs() + return + self._type_analysis() if self.required is Undefined: self.required = True @@ -374,11 +379,6 @@ def _set_default_and_type(self) -> None: if self.type_ is None: raise errors_.ConfigError(f'unable to infer type for attribute "{self.name}"') - if self.type_.__class__ == ForwardRef: - # self.type_ is currently a ForwardRef and there's nothing we can do now, - # user will need to call model.update_forward_refs() - return - if self.required is False and default_value is None: self.allow_none = True @@ -539,7 +539,7 @@ def populate_validators(self) -> None: if class_validators_: self.pre_validators += prep_validators(v.func for v in class_validators_ if not v.each_item and v.pre) - self.post_validators = prep_validators(v.func for v in class_validators_ if not v.each_item and not v.pre) + self.post_validators += prep_validators(v.func for v in class_validators_ if not v.each_item and not v.pre) if self.parse_json: self.pre_validators.append(make_generic_validator(validate_json)) diff --git 
a/pydantic/main.py b/pydantic/main.py index c872f1e3b5..c6c486adae 100644 --- a/pydantic/main.py +++ b/pydantic/main.py @@ -42,6 +42,7 @@ generate_model_signature, lenient_issubclass, sequence_like, + smart_deepcopy, unique_list, validate_field_name, ) @@ -219,7 +220,7 @@ def __new__(mcs, name, bases, namespace, **kwargs): # noqa C901 pre_root_validators, post_root_validators = [], [] for base in reversed(bases): if _is_base_model_class_defined and issubclass(base, BaseModel) and base != BaseModel: - fields.update(deepcopy(base.__fields__)) + fields.update(smart_deepcopy(base.__fields__)) config = inherit_config(base.__config__, config) validators = inherit_validators(base.__validators__, validators) pre_root_validators += base.__pre_root_validators__ @@ -527,7 +528,7 @@ def construct(cls: Type['Model'], _fields_set: Optional['SetStr'] = None, **valu Default values are respected, but no other validation is performed. """ m = cls.__new__(cls) - object.__setattr__(m, '__dict__', {**deepcopy(cls.__field_defaults__), **values}) + object.__setattr__(m, '__dict__', {**smart_deepcopy(cls.__field_defaults__), **values}) if _fields_set is None: _fields_set = set(values.keys()) object.__setattr__(m, '__fields_set__', _fields_set) @@ -558,6 +559,7 @@ def copy( ) if deep: + # chances of having empty dict here are quite low for using smart_deepcopy v = deepcopy(v) cls = self.__class__ diff --git a/pydantic/mypy.py b/pydantic/mypy.py index 4881d8d2ce..18d7ef090b 100644 --- a/pydantic/mypy.py +++ b/pydantic/mypy.py @@ -328,7 +328,7 @@ def add_construct_method(self, fields: List['PydanticModelField']) -> None: construct_arguments = [fields_set_argument] + construct_arguments obj_type = ctx.api.named_type('__builtins__.object') - self_tvar_name = 'Model' + self_tvar_name = '_PydanticBaseModel' # Make sure it does not conflict with other names in the class tvar_fullname = ctx.cls.fullname + '.' 
+ self_tvar_name tvd = TypeVarDef(self_tvar_name, tvar_fullname, -1, [], obj_type) self_tvar_expr = TypeVarExpr(self_tvar_name, tvar_fullname, [], obj_type) diff --git a/pydantic/schema.py b/pydantic/schema.py index 27c66b2bd3..15b5066785 100644 --- a/pydantic/schema.py +++ b/pydantic/schema.py @@ -14,6 +14,7 @@ Iterable, List, Optional, + Pattern, Sequence, Set, Tuple, @@ -618,6 +619,7 @@ def field_singleton_sub_fields_schema( (IPv6Interface, {'type': 'string', 'format': 'ipv6interface'}), (IPv4Address, {'type': 'string', 'format': 'ipv4'}), (IPv6Address, {'type': 'string', 'format': 'ipv6'}), + (Pattern, {'type': 'string', 'format': 'regex'}), (str, {'type': 'string'}), (bytes, {'type': 'string', 'format': 'binary'}), (bool, {'type': 'boolean'}), @@ -643,7 +645,8 @@ def add_field_type_to_schema(field_type: Any, schema: Dict[str, Any]) -> None: and then modifies the given `schema` with the information from that type. """ for type_, t_schema in field_class_to_schema: - if issubclass(field_type, type_): + # Fallback for `typing.Pattern` as it is not a valid class + if lenient_issubclass(field_type, type_) or field_type is type_ is Pattern: schema.update(t_schema) break diff --git a/pydantic/tools.py b/pydantic/tools.py index 0f70ea6bdd..d0d320b5aa 100644 --- a/pydantic/tools.py +++ b/pydantic/tools.py @@ -3,11 +3,11 @@ from pathlib import Path from typing import Any, Callable, Optional, Type, TypeVar, Union -from pydantic.parse import Protocol, load_file - +from .parse import Protocol, load_file, load_str_bytes +from .types import StrBytes from .typing import display_as_type -__all__ = ('parse_file_as', 'parse_obj_as') +__all__ = ('parse_file_as', 'parse_obj_as', 'parse_raw_as') NameFactory = Union[str, Callable[[Type[Any]], str]] @@ -55,3 +55,25 @@ def parse_file_as( json_loads=json_loads, ) return parse_obj_as(type_, obj, type_name=type_name) + + +def parse_raw_as( + type_: Type[T], + b: StrBytes, + *, + content_type: str = None, + encoding: str = 'utf8', + proto: 
Protocol = None, + allow_pickle: bool = False, + json_loads: Callable[[str], Any] = json.loads, + type_name: Optional[NameFactory] = None, +) -> T: + obj = load_str_bytes( + b, + proto=proto, + content_type=content_type, + encoding=encoding, + allow_pickle=allow_pickle, + json_loads=json_loads, + ) + return parse_obj_as(type_, obj, type_name=type_name) diff --git a/pydantic/types.py b/pydantic/types.py index 6f60b312e5..79693dd3ec 100644 --- a/pydantic/types.py +++ b/pydantic/types.py @@ -317,6 +317,11 @@ def validate(cls, value: Any) -> Any: except ImportError as e: raise errors.PyObjectError(error_message=str(e)) + if TYPE_CHECKING: + + def __call__(self, *args: Any, **kwargs: Any) -> Any: + ... + class ConstrainedNumberMeta(type): def __new__(cls, name: str, bases: Any, dct: Dict[str, Any]) -> 'ConstrainedInt': # type: ignore diff --git a/pydantic/utils.py b/pydantic/utils.py index d14a2858a3..42a4d30e60 100644 --- a/pydantic/utils.py +++ b/pydantic/utils.py @@ -1,6 +1,9 @@ import warnings +import weakref +from collections import OrderedDict, defaultdict, deque +from copy import deepcopy from itertools import islice -from types import GeneratorType +from types import BuiltinFunctionType, CodeType, FunctionType, GeneratorType, LambdaType, ModuleType from typing import ( TYPE_CHECKING, AbstractSet, @@ -20,7 +23,7 @@ no_type_check, ) -from .typing import display_as_type +from .typing import NoneType, display_as_type from .version import version_info if TYPE_CHECKING: @@ -50,6 +53,41 @@ 'ClassAttribute', ) +# these are types that are returned unchanged by deepcopy +IMMUTABLE_NON_COLLECTIONS_TYPES: Set[Type[Any]] = { + int, + float, + complex, + str, + bool, + bytes, + type, + NoneType, + FunctionType, + BuiltinFunctionType, + LambdaType, + weakref.ref, + CodeType, + # note: including ModuleType will differ from behaviour of deepcopy by not producing error. 
+ # It might be not a good idea in general, but considering that this function used only internally + # against default values of fields, this will allow to actually have a field with module as default value + ModuleType, + NotImplemented.__class__, + Ellipsis.__class__, +} + +# these are types that if empty, might be copied with simple copy() instead of deepcopy() +BUILTIN_COLLECTIONS: Set[Type[Any]] = { + list, + set, + tuple, + frozenset, + dict, + OrderedDict, + defaultdict, + deque, +} + def import_string(dotted_path: str) -> Any: """ @@ -534,3 +572,22 @@ def __get__(self, instance: Any, owner: Type[Any]) -> None: if instance is None: return self.value raise AttributeError(f'{self.name!r} attribute of {owner.__name__!r} is class-only') + + +Obj = TypeVar('Obj') + + +def smart_deepcopy(obj: Obj) -> Obj: + """ + Return type as is for immutable built-in types + Use obj.copy() for built-in empty collections + Use copy.deepcopy() for non-empty collections and unknown objects + """ + + obj_type = obj.__class__ + if obj_type in IMMUTABLE_NON_COLLECTIONS_TYPES: + return obj # fastest case: obj is immutable and not collection therefore will not be copied anyway + elif not obj and obj_type in BUILTIN_COLLECTIONS: + # faster way for empty collections, no need to copy its members + return obj if obj_type is tuple else obj.copy() # type: ignore # tuple doesn't have copy method + return deepcopy(obj) # slowest way when we actually might need a deepcopy diff --git a/tests/mypy/modules/plugin_success.py b/tests/mypy/modules/plugin_success.py index 8b0c6afd15..af4b8e376f 100644 --- a/tests/mypy/modules/plugin_success.py +++ b/tests/mypy/modules/plugin_success.py @@ -123,3 +123,13 @@ class AddProject: class TypeAliasAsAttribute(BaseModel): __type_alias_attribute__ = Union[str, bytes] + + +class NestedModel(BaseModel): + class Model(BaseModel): + id: str + + model: Model + + +_ = NestedModel.Model diff --git a/tests/mypy/modules/success.py b/tests/mypy/modules/success.py index 
a2d5fdef55..0ba4ae9625 100644 --- a/tests/mypy/modules/success.py +++ b/tests/mypy/modules/success.py @@ -5,10 +5,10 @@ """ import json import sys -from datetime import datetime +from datetime import date, datetime from typing import Any, Dict, Generic, List, Optional, TypeVar -from pydantic import BaseModel, NoneStr, StrictBool, root_validator, validate_arguments, validator +from pydantic import BaseModel, NoneStr, PyObject, StrictBool, root_validator, validate_arguments, validator from pydantic.fields import Field from pydantic.generics import GenericModel @@ -127,3 +127,13 @@ def foo(a: int, *, c: str = 'x') -> str: foo(1, c='thing') foo(1) + + +class MyConf(BaseModel): + str_pyobject: PyObject = Field('datetime.date') + callable_pyobject: PyObject = Field(date) + + +conf = MyConf() +var1: date = conf.str_pyobject(2020, 12, 20) +var2: date = conf.callable_pyobject(2111, 1, 1) diff --git a/tests/requirements.txt b/tests/requirements.txt index b7f098bb9d..b7a012b01d 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,12 +1,12 @@ black==20.8b1 -coverage==5.2.1 -flake8==3.8.3 +coverage==5.3 +flake8==3.8.4 flake8-quotes==3.2.0 -isort==5.5.1 +isort==5.6.1 mypy==0.782 pycodestyle==2.6.0 pyflakes==2.2.0 -pytest==6.0.1 +pytest==6.1.1 pytest-cov==2.10.1 pytest-mock==3.3.1 pytest-sugar==0.9.4 diff --git a/tests/test_forward_ref.py b/tests/test_forward_ref.py index d640dd7a5f..420079dfb1 100644 --- a/tests/test_forward_ref.py +++ b/tests/test_forward_ref.py @@ -439,3 +439,36 @@ class Foo(BaseModel): raise AssertionError('error not raised') """ ) + + +@skip_pre_37 +def test_forward_ref_optional(create_module): + module = create_module( + """ +from __future__ import annotations +from pydantic import BaseModel, Field +from typing import List, Optional + + +class Spec(BaseModel): + spec_fields: List[str] = Field(..., alias="fields") + filter: Optional[str] + sort: Optional[str] + + +class PSpec(Spec): + g: Optional[GSpec] + + +class GSpec(Spec): + p: 
Optional[PSpec] + +PSpec.update_forward_refs() + +class Filter(BaseModel): + g: Optional[GSpec] + p: Optional[PSpec] + """ + ) + Filter = module.Filter + assert isinstance(Filter(p={'sort': 'some_field:asc', 'fields': []}), Filter) diff --git a/tests/test_generics.py b/tests/test_generics.py index b1821e77b6..452fe3bb05 100644 --- a/tests/test_generics.py +++ b/tests/test_generics.py @@ -410,6 +410,26 @@ class MyModel(GenericModel, Generic[T]): assert schema['properties']['a'].get('description') == 'Custom' +@skip_36 +def test_child_schema(): + + T = TypeVar('T') + + class Model(GenericModel, Generic[T]): + a: T + + class Child(Model[T], Generic[T]): + pass + + schema = Child[int].schema() + assert schema == { + 'title': 'Child[int]', + 'type': 'object', + 'properties': {'a': {'title': 'A', 'type': 'integer'}}, + 'required': ['a'], + } + + @skip_36 def test_custom_generic_naming(): T = TypeVar('T') diff --git a/tests/test_main.py b/tests/test_main.py index 19d17e6d43..b19b6f9143 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -5,7 +5,18 @@ import pytest -from pydantic import BaseModel, ConfigError, Extra, Field, NoneBytes, NoneStr, Required, ValidationError, constr +from pydantic import ( + BaseModel, + ConfigError, + Extra, + Field, + NoneBytes, + NoneStr, + Required, + ValidationError, + constr, + validator, +) def test_success(): @@ -415,6 +426,27 @@ class Model(BaseModel): ] +def test_const_with_validator(): + class Model(BaseModel): + a: int = Field(3, const=True) + + @validator('a') + def validate(v): + return v + + with pytest.raises(ValidationError) as exc_info: + Model(a=4) + + assert exc_info.value.errors() == [ + { + 'loc': ('a',), + 'msg': 'unexpected value; permitted: 3', + 'type': 'value_error.const', + 'ctx': {'given': 4, 'permitted': [3]}, + } + ] + + def test_const_list(): class SubModel(BaseModel): b: int diff --git a/tests/test_settings.py b/tests/test_settings.py index d24aae349e..6e734481c3 100644 --- a/tests/test_settings.py +++ 
b/tests/test_settings.py @@ -1,4 +1,6 @@ import os +import uuid +from pathlib import Path from typing import Dict, List, Optional, Set import pytest @@ -437,21 +439,21 @@ def test_config_file_settings_nornir(env): """ class Settings(BaseSettings): - a: str - b: str - c: str + param_a: str + param_b: str + param_c: str def _build_values(self, init_kwargs, _env_file, _env_file_encoding): config_settings = init_kwargs.pop('__config_settings__') return {**config_settings, **init_kwargs, **self._build_environ()} - env.set('C', 'env setting c') + env.set('PARAM_C', 'env setting c') - config = {'a': 'config a', 'b': 'config b', 'c': 'config c'} - s = Settings(__config_settings__=config, b='argument b', c='argument c') - assert s.a == 'config a' - assert s.b == 'argument b' - assert s.c == 'env setting c' + config = {'param_a': 'config a', 'param_b': 'config b', 'param_c': 'config c'} + s = Settings(__config_settings__=config, param_b='argument b', param_c='argument c') + assert s.param_a == 'config a' + assert s.param_b == 'argument b' + assert s.param_c == 'env setting c' test_env_file = """\ @@ -547,6 +549,28 @@ class Config: assert s.pika == 'p!±@' +@pytest.fixture +def home_tmp(): + tmp_filename = f'{uuid.uuid4()}.env' + home_tmp_path = Path.home() / tmp_filename + yield home_tmp_path, tmp_filename + home_tmp_path.unlink() + + +@pytest.mark.skipif(not dotenv, reason='python-dotenv not installed') +def test_env_file_home_directory(home_tmp): + home_tmp_path, tmp_filename = home_tmp + home_tmp_path.write_text('pika=baz') + + class Settings(BaseSettings): + pika: str + + class Config: + env_file = f'~/{tmp_filename}' + + assert Settings().pika == 'baz' + + @pytest.mark.skipif(not dotenv, reason='python-dotenv not installed') def test_env_file_none(tmp_path): p = tmp_path / '.env' diff --git a/tests/test_tools.py b/tests/test_tools.py index 14854dd495..c1a13e68ca 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -5,7 +5,7 @@ from pydantic import BaseModel, 
ValidationError from pydantic.dataclasses import dataclass -from pydantic.tools import parse_file_as, parse_obj_as +from pydantic.tools import parse_file_as, parse_obj_as, parse_raw_as @pytest.mark.parametrize('obj,type_,parsed', [('1', int, 1), (['1'], List[int], [1])]) @@ -88,3 +88,13 @@ def custom_json_loads(*args, **kwargs): p = tmp_path / 'test_json_loads.json' p.write_text('{"1": "2"}') assert parse_file_as(Dict[int, int], p, json_loads=custom_json_loads) == {1: 99} + + +def test_raw_as(): + class Item(BaseModel): + id: int + name: str + + item_data = '[{"id": 1, "name": "My Item"}]' + items = parse_raw_as(List[Item], item_data) + assert items == [Item(id=1, name='My Item')] diff --git a/tests/test_types.py b/tests/test_types.py index 084b9d071a..6752c60722 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -2055,6 +2055,13 @@ class Foobar(BaseModel): f2 = Foobar(pattern=p) assert f2.pattern is p + assert Foobar.schema() == { + 'type': 'object', + 'title': 'Foobar', + 'properties': {'pattern': {'type': 'string', 'format': 'regex', 'title': 'Pattern'}}, + 'required': ['pattern'], + } + def test_pattern_error(): class Foobar(BaseModel): diff --git a/tests/test_utils.py b/tests/test_utils.py index d59f2e3ed9..989176c20c 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,6 +1,7 @@ import os import re import string +from copy import copy, deepcopy from distutils.version import StrictVersion from enum import Enum from typing import NewType, Union @@ -13,12 +14,14 @@ from pydantic.fields import Undefined from pydantic.typing import Literal, all_literal_values, display_as_type, is_new_type, new_type_supertype from pydantic.utils import ( + BUILTIN_COLLECTIONS, ClassAttribute, ValueItems, deep_update, get_model, import_string, lenient_issubclass, + smart_deepcopy, truncate, unique_list, ) @@ -293,6 +296,11 @@ def test_undefined_repr(): assert repr(Undefined) == 'PydanticUndefined' +def test_undefined_copy(): + assert copy(Undefined) is Undefined + 
assert deepcopy(Undefined) is Undefined + + def test_get_model(): class A(BaseModel): a: str @@ -347,3 +355,30 @@ def test_all_literal_values(): L312 = Literal['3', Literal[L1, L2]] assert sorted(all_literal_values(L312)) == sorted(('1', '2', '3')) + + +@pytest.mark.parametrize( + 'obj', + (1, 1.0, '1', b'1', int, None, test_all_literal_values, len, test_all_literal_values.__code__, lambda: ..., ...), +) +def test_smart_deepcopy_immutable_non_sequence(obj, mocker): + # make sure deepcopy is not used + # (other option will be to use obj.copy(), but this will produce error as none of given objects have this method) + mocker.patch('pydantic.utils.deepcopy', side_effect=RuntimeError) + assert smart_deepcopy(obj) is deepcopy(obj) is obj + + +@pytest.mark.parametrize('empty_collection', (collection() for collection in BUILTIN_COLLECTIONS)) +def test_smart_deepcopy_empty_collection(empty_collection, mocker): + mocker.patch('pydantic.utils.deepcopy', side_effect=RuntimeError) # make sure deepcopy is not used + if not isinstance(empty_collection, (tuple, frozenset)): # empty tuple or frozenset are always the same object + assert smart_deepcopy(empty_collection) is not empty_collection + + +@pytest.mark.parametrize( + 'collection', (c.fromkeys((1,)) if issubclass(c, dict) else c((1,)) for c in BUILTIN_COLLECTIONS) +) +def test_smart_deepcopy_collection(collection, mocker): + expected_value = object() + mocker.patch('pydantic.utils.deepcopy', return_value=expected_value) + assert smart_deepcopy(collection) is expected_value