-
-
Notifications
You must be signed in to change notification settings - Fork 1.7k
/
config.py
239 lines (190 loc) · 8.03 KB
/
config.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
from __future__ import annotations as _annotations
import json
import warnings
from enum import Enum
from typing import TYPE_CHECKING, Any, Callable, ForwardRef
from typing_extensions import Literal, Protocol, TypedDict
from pydantic.errors import PydanticUserError
if TYPE_CHECKING:
    from typing import overload

    from .main import BaseModel

    # Type-checking-only Protocol giving `schema_extra` callables precise overloads;
    # at runtime (the `else` branch) it is just a loose callable alias.
    class SchemaExtraCallable(Protocol):
        # TODO: This has been replaced with __pydantic_modify_json_schema__ in v2; need to make sure we
        # document the migration, in particular changing `model_class` to `cls` from the classmethod
        # TODO: Note that the argument to Field(...) that served a similar purpose received the FieldInfo as well.
        # Should we accept that argument here too? Will that add a ton of boilerplate?
        # Tentative suggestion to previous TODO: I think we let the json_schema_extra argument
        # to FieldInfo be a callable that accepts schema, model_class, and field_info. And use
        # similar machinery to `_apply_modify_schema` to call the function properly for different signatures.
        # (And use this Protocol-based approach to get good type-checking.)
        @overload
        def __call__(self, schema: dict[str, Any]) -> None:
            pass

        @overload
        def __call__(self, schema: dict[str, Any], model_class: type[BaseModel]) -> None:
            pass

else:
    # Runtime fallback: any callable is accepted; the overloads above exist only for type-checkers.
    SchemaExtraCallable = Callable[..., None]
# Public API of this module.
__all__ = 'BaseConfig', 'ConfigDict', 'Extra', 'build_config', 'prepare_config'
class Extra(str, Enum):
    """Allowed values for the `extra` config key (see `prepare_config`, which coerces
    raw strings into this enum)."""

    allow = 'allow'
    ignore = 'ignore'
    forbid = 'forbid'
class _ConfigDict(TypedDict, total=False):
    """Declaration of every supported model-configuration key.

    All keys are optional (`total=False`); defaults live in `_default_config` below,
    and the set of valid keys is derived from these annotations as `config_keys`.
    """

    # TODO: We should raise a warning when building a model class if a now-invalid config key is present
    title: str | None
    str_to_lower: bool
    str_to_upper: bool
    str_strip_whitespace: bool
    str_min_length: int
    str_max_length: int | None
    extra: Extra
    frozen: bool
    populate_by_name: bool
    use_enum_values: bool
    validate_assignment: bool
    arbitrary_types_allowed: bool  # TODO default True, or remove
    undefined_types_warning: bool  # TODO review docs
    from_attributes: bool
    alias_generator: Callable[[str], str] | None
    ignored_types: tuple[type, ...]  # TODO remove??
    json_loads: Callable[[str], Any]  # TODO decide
    json_dumps: Callable[..., str]  # TODO decide
    json_encoders: dict[type[Any] | str | ForwardRef, Callable[..., Any]]  # TODO decide
    allow_inf_nan: bool
    strict: bool
    # whether inherited models as fields should be reconstructed as base model,
    # and whether such a copy should be shallow or deep
    copy_on_model_validation: Literal['none', 'deep', 'shallow']  # TODO remove???
    # whether dataclass `__post_init__` should be run before or after validation
    post_init_call: Literal['before_validation', 'after_validation']  # TODO remove
    # new in V2
    ser_json_timedelta: Literal['iso8601', 'float']
    ser_json_bytes: Literal['utf8', 'base64']
# The set of all valid configuration keys, derived from the TypedDict annotations.
# (Iterating a dict yields its keys, so the redundant `.keys()` call is dropped.)
config_keys = set(_ConfigDict.__annotations__)
if TYPE_CHECKING:
    # For type-checkers, `ConfigDict` is the TypedDict above (key/value types enforced).
    class ConfigDict(_ConfigDict):
        ...

else:
    # At runtime, a plain dict whose missing keys fall back to `_default_config`
    # (defined just below; `__missing__` is only invoked at lookup time, so the
    # forward reference is safe).
    class ConfigDict(dict):
        def __missing__(self, key: str) -> Any:
            if key in _default_config:  # need this check to prevent a recursion error
                return _default_config[key]
            raise KeyError(key)
# Baseline value for every config key; `ConfigDict.__missing__` falls back to this
# mapping, so any `ConfigDict` lookup that is not explicitly set resolves here.
_default_config = ConfigDict(
    title=None,
    str_to_lower=False,
    str_to_upper=False,
    str_strip_whitespace=False,
    str_min_length=0,
    str_max_length=None,
    extra=Extra.ignore,
    frozen=False,
    populate_by_name=False,
    use_enum_values=False,
    validate_assignment=False,
    arbitrary_types_allowed=False,
    undefined_types_warning=True,
    from_attributes=False,
    alias_generator=None,
    ignored_types=(),
    json_loads=json.loads,
    json_dumps=json.dumps,
    json_encoders={},
    allow_inf_nan=True,
    strict=False,
    copy_on_model_validation='shallow',
    post_init_call='before_validation',
    ser_json_timedelta='iso8601',
    ser_json_bytes='utf8',
)
class ConfigMetaclass(type):
    """Metaclass for `BaseConfig` that makes class-level attribute access on the
    deprecated config class resolve against `_default_config`, emitting a
    `DeprecationWarning` each time."""

    def __getattr__(self, item: str) -> Any:
        # Fix: the original message ended with a stray `"` before the closing quote
        # ('...future version"'); it now matches the identical messages used elsewhere
        # in this module.
        warnings.warn(
            f'Support for "config" as "{self.__name__}" is deprecated and will be removed in a future version',
            DeprecationWarning,
        )
        try:
            return _default_config[item]  # type: ignore[literal-required]
        except KeyError as exc:
            # `{exc}` renders the missing key in quotes, mirroring Python's own message style.
            raise AttributeError(f"type object '{self.__name__}' has no attribute {exc}")
class BaseConfig(metaclass=ConfigMetaclass):
    """
    This class is only retained for backwards compatibility.
    The preferred approach going forward is to assign a ConfigDict to the `model_config` attribute of the Model class.
    """

    def __getattr__(self, item: str) -> Any:
        # Instance-level lookup: warn, then try normal attribute resolution, then fall
        # back to the class (which triggers `ConfigMetaclass.__getattr__` and thus the
        # `_default_config` fallback).
        warnings.warn(
            f'Support for "config" as "{type(self).__name__}" is deprecated and will be removed in a future version',
            DeprecationWarning,
        )
        try:
            return super().__getattribute__(item)
        except AttributeError as exc:
            try:
                return getattr(type(self), item)
            except AttributeError:
                # reraising changes the displayed text to reflect that `self` is not a type
                raise AttributeError(str(exc))

    def __init_subclass__(cls, **kwargs: Any) -> None:
        # Subclassing (`class Config(BaseConfig): ...`) is itself deprecated.
        warnings.warn(
            '`BaseConfig` is deprecated and will be removed in a future version',
            DeprecationWarning,
        )
        return super().__init_subclass__(**kwargs)
def get_config(config: ConfigDict | dict[str, Any] | type[Any] | None) -> ConfigDict:
    """Coerce any accepted config source into a `ConfigDict`.

    `None` yields an empty `ConfigDict`; a plain dict is wrapped as-is; anything else
    is treated as a legacy class-based config (deprecated) whose non-dunder attributes
    are collected into the result.
    """
    if config is None:
        return ConfigDict()

    if isinstance(config, dict):
        return ConfigDict(config)  # type: ignore

    # Legacy `class Config:` style — deprecated in favour of `model_config` dicts.
    warnings.warn(
        f'Support for "config" as "{type(config).__name__}" is deprecated and will be removed in a future version',
        DeprecationWarning,
    )
    attributes = {name: getattr(config, name) for name in dir(config) if not name.startswith('__')}
    return ConfigDict(attributes)  # type: ignore
def build_config(
    cls_name: str, bases: tuple[type[Any], ...], namespace: dict[str, Any], kwargs: dict[str, Any]
) -> ConfigDict:
    """
    Build a new ConfigDict instance based on (from lowest to highest)
    - options defined in base
    - options defined in namespace
    - options defined via kwargs

    Note: config-related keys are popped out of `kwargs` (intentional side effect).
    """
    # Extract config options passed as class keyword arguments, removing them from kwargs.
    kwarg_config = {key: kwargs.pop(key) for key in list(kwargs) if key in config_keys}

    # `layers` remembers every contributing config in precedence order so that
    # `json_encoders` can be merged key-by-key afterwards instead of being replaced wholesale.
    layers: list[Any] = []
    merged: dict[str, Any] = {}

    # Lowest precedence: configs inherited from base classes, in MRO-declaration order.
    for base in bases:
        base_config = getattr(base, 'model_config', None)
        if base_config:
            layers.append(base_config)
            merged.update(base_config)

    # Next: config declared in the class body, either legacy `class Config` or `model_config`.
    legacy_config = namespace.get('Config')
    dict_config = namespace.get('model_config')
    if legacy_config and dict_config:
        raise PydanticUserError('"Config" and "model_config" cannot be used together')
    namespace_config = dict_config or get_config(legacy_config)
    if namespace_config:
        layers.append(namespace_config)
        merged.update(namespace_config)

    # Highest precedence: class keyword arguments.
    layers.append(kwarg_config)
    merged.update(kwarg_config)
    new_model_config = ConfigDict(merged)  # type: ignore

    # merge `json_encoders`-dict in correct order
    encoders: dict[Any, Any] = {}
    for layer in layers:
        encoders.update(layer.get('json_encoders', {}))
    if encoders:
        new_model_config['json_encoders'] = encoders

    prepare_config(new_model_config, cls_name)
    return new_model_config
def prepare_config(config: ConfigDict, cls_name: str) -> None:
    """Normalize `config['extra']` in place to an `Extra` enum member.

    Raises `ValueError` (naming `cls_name`) if the value is not a valid `Extra` value.
    The original coercion error is chained via `from exc` so tracebacks show the cause.
    """
    if not isinstance(config['extra'], Extra):
        try:
            config['extra'] = Extra(config['extra'])
        except ValueError as exc:
            raise ValueError(f'"{cls_name}": {config["extra"]} is not a valid value for "extra"') from exc