
Commit

Merge pull request #626 from JWCook/cattrs-22
(0.9) Add compatibility with cattrs 22.1
JWCook committed May 4, 2022
2 parents 7d63e72 + f11fac4 commit 66550b5
Showing 5 changed files with 99 additions and 53 deletions.
3 changes: 2 additions & 1 deletion .gitignore
@@ -24,7 +24,8 @@ package.json

# Test / coverage reports
.coverage
.tox
.coverage.*
.nox
.mypy_cache/
test-reports/

1 change: 1 addition & 0 deletions HISTORY.md
@@ -2,6 +2,7 @@

## 0.9.5 (Unreleased)
* Fix usage of memory backend with `install_cache()`
* Add compatibility with cattrs 22.1

## 0.9.4 (2022-04-22)
* Fix forwarding connection parameters passed to `RedisCache` for redis-py 4.2 and python <=3.8
24 changes: 20 additions & 4 deletions poetry.lock

Some generated files are not rendered by default.

5 changes: 4 additions & 1 deletion pyproject.toml
@@ -33,7 +33,7 @@ requests = "^2.22" # Needs no introduction
urllib3 = "^1.25.5" # Use a slightly newer version than required by requests (for bugfixes)
appdirs = "^1.4.4" # For options that use platform-specific user cache dirs
attrs = "^21.2" # For response data models
cattrs = "^1.8" # For response serialization
cattrs = ">=1.8,<=22.2" # For response serialization
url-normalize = "^1.4" # For improved request matching

# Optional backend dependencies
@@ -62,6 +62,9 @@ sphinx-notfound-page = {optional=true, version="*"}
sphinx-panels = {optional=true, version="^0.6"}
sphinxcontrib-apidoc = {optional=true, version="^0.3"}

# Temporary fix until cattrs 22.2.0 is released
exceptiongroup = {version = ">=1.0.0-rc.3", python = "^3.10.0"}

[tool.poetry.extras]
# Package extras for optional backend dependencies
dynamodb = ["boto3", "botocore"]
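For reference, the widened constraint accepts any cattrs release from 1.8 up to and including 22.2. As a quick sanity check (not part of this commit; packaging and importlib.metadata are used here purely for illustration), the installed version can be tested against the same range:

# Confirm the installed cattrs version falls inside the ">=1.8,<=22.2" range
from importlib.metadata import version
from packaging.specifiers import SpecifierSet

cattrs_version = version('cattrs')        # e.g. '22.1.0'
supported = SpecifierSet('>=1.8,<=22.2')  # same range as the poetry constraint
print(f'cattrs {cattrs_version} supported: {cattrs_version in supported}')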
119 changes: 72 additions & 47 deletions requests_cache/serializers/preconf.py
@@ -1,9 +1,10 @@
# flake8: noqa: F841
"""The ``cattrs`` library includes a number of `pre-configured converters
<https://cattrs.readthedocs.io/en/latest/preconf.html>`_ that perform some pre-serialization steps
required for specific serialization formats.
This module wraps those converters as serializer :py:class:`.Stage` objects. These are then used as
a stage in a :py:class:`.SerializerPipeline`, which runs after the base converter and before the
stages in a :py:class:`.SerializerPipeline`, which runs after the base converter and before the
format's ``dumps()`` (or equivalent) method.
For any optional libraries that aren't installed, the corresponding serializer will be a placeholder
@@ -13,70 +14,95 @@ class that raises an ``ImportError`` at initialization time instead of at import
:nosignatures:
"""
import pickle
from datetime import timedelta
from decimal import Decimal
from functools import partial
from importlib import import_module

from cattr.preconf import bson as bson_preconf
from cattr.preconf import json as json_preconf
from cattr.preconf import msgpack, orjson, pyyaml, tomlkit, ujson
from cattr import GenConverter

from .._utils import get_placeholder_class
from .cattrs import CattrStage
from .pipeline import SerializerPipeline, Stage

base_stage = (
    CattrStage()
) #: Base stage for all serializer pipelines (or standalone dict serializer)
dict_serializer = base_stage #: Partial serializer that unstructures responses into dicts
bson_preconf_stage = CattrStage(bson_preconf.make_converter) #: Pre-serialization steps for BSON
json_preconf_stage = CattrStage(json_preconf.make_converter) #: Pre-serialization steps for JSON
msgpack_preconf_stage = CattrStage(msgpack.make_converter) #: Pre-serialization steps for msgpack
orjson_preconf_stage = CattrStage(orjson.make_converter) #: Pre-serialization steps for orjson
yaml_preconf_stage = CattrStage(pyyaml.make_converter) #: Pre-serialization steps for YAML
toml_preconf_stage = CattrStage(tomlkit.make_converter) #: Pre-serialization steps for TOML
ujson_preconf_stage = CattrStage(ujson.make_converter) #: Pre-serialization steps for ultrajson
pickle_serializer = SerializerPipeline(
    [base_stage, pickle], is_binary=True
) #: Complete pickle serializer
utf8_encoder = Stage(dumps=str.encode, loads=lambda x: x.decode()) #: Encode to bytes

def make_stage(preconf_module: str, **kwargs):
"""Create a preconf serializer stage from a module name, if dependencies are installed"""
try:
factory = import_module(preconf_module).make_converter
return CattrStage(factory, **kwargs)
except ImportError as e:
return get_placeholder_class(e)


# Pre-serialization stages
base_stage = CattrStage() #: Base stage for all serializer pipelines
utf8_encoder = Stage(dumps=str.encode, loads=lambda x: x.decode()) #: Encode to bytes
bson_preconf_stage = make_stage('cattr.preconf.bson') #: Pre-serialization steps for BSON
json_preconf_stage = make_stage('cattr.preconf.json') #: Pre-serialization steps for JSON
msgpack_preconf_stage = make_stage('cattr.preconf.msgpack') #: Pre-serialization steps for msgpack
orjson_preconf_stage = make_stage('cattr.preconf.orjson') #: Pre-serialization steps for orjson
toml_preconf_stage = make_stage('cattr.preconf.tomlkit') #: Pre-serialization steps for TOML
ujson_preconf_stage = make_stage('cattr.preconf.ujson') #: Pre-serialization steps for ultrajson
yaml_preconf_stage = make_stage('cattr.preconf.pyyaml') #: Pre-serialization steps for YAML

# Basic serializers with no additional dependencies
dict_serializer = SerializerPipeline(
    [base_stage], is_binary=False
) #: Partial serializer that unstructures responses into dicts
pickle_serializer = SerializerPipeline(
    [base_stage, Stage(pickle)], is_binary=True
) #: Pickle serializer

# Safe pickle serializer
try:
def signer_stage(secret_key=None, salt='requests-cache') -> Stage:
"""Create a stage that uses ``itsdangerous`` to add a signature to responses on write, and
validate that signature with a secret key on read. Can be used in a
:py:class:`.SerializerPipeline` in combination with any other serialization steps.
"""
from itsdangerous import Signer

def signer_stage(secret_key=None, salt='requests-cache') -> Stage:
"""Create a stage that uses ``itsdangerous`` to add a signature to responses on write, and
validate that signature with a secret key on read. Can be used in a
:py:class:`.SerializerPipeline` in combination with any other serialization steps.
"""
return Stage(Signer(secret_key=secret_key, salt=salt), dumps='sign', loads='unsign')

    def safe_pickle_serializer(
        secret_key=None, salt='requests-cache', **kwargs
    ) -> SerializerPipeline:
        """Create a serializer that uses ``pickle`` + ``itsdangerous`` to add a signature to
        responses on write, and validate that signature with a secret key on read.
        """
        return SerializerPipeline(
            [base_stage, pickle, signer_stage(secret_key, salt)], is_binary=True
        )
    return Stage(
        Signer(secret_key=secret_key, salt=salt),
        dumps='sign',
        loads='unsign',
    )


def safe_pickle_serializer(secret_key=None, salt='requests-cache', **kwargs) -> SerializerPipeline:
"""Create a serializer that uses ``pickle`` + ``itsdangerous`` to add a signature to
responses on write, and validate that signature with a secret key on read.
"""
return SerializerPipeline(
[base_stage, Stage(pickle), signer_stage(secret_key, salt)],
is_binary=True,
)


try:
    import itsdangerous  # noqa: F401
except ImportError as e:
    signer_stage = get_placeholder_class(e)
    safe_pickle_serializer = get_placeholder_class(e)


# BSON serializer
try:
def _get_bson_functions():
"""Handle different function names between pymongo's bson and standalone bson"""
try:
from bson import decode as _bson_loads
from bson import encode as _bson_dumps
import pymongo # noqa: F401

return {'dumps': 'encode', 'loads': 'decode'}
except ImportError:
from bson import dumps as _bson_dumps
from bson import loads as _bson_loads
return {'dumps': 'dumps', 'loads': 'loads'}


try:
    import bson

    bson_serializer = SerializerPipeline(
        [bson_preconf_stage, Stage(dumps=_bson_dumps, loads=_bson_loads)], is_binary=True
        [bson_preconf_stage, Stage(bson, **_get_bson_functions())],
        is_binary=True,
    )  #: Complete BSON serializer; uses pymongo's ``bson`` if installed, otherwise standalone ``bson`` codec
except ImportError as e:
    bson_serializer = get_placeholder_class(e)
@@ -94,19 +120,18 @@ def safe_pickle_serializer(

_json_stage = Stage(dumps=partial(json.dumps, indent=2), loads=json.loads)
json_serializer = SerializerPipeline(
    [_json_preconf_stage, _json_stage], is_binary=False
    [_json_preconf_stage, _json_stage],
    is_binary=False,
) #: Complete JSON serializer; uses ultrajson if available


# YAML serializer
try:
    import yaml

    _yaml_stage = Stage(yaml, loads='safe_load', dumps='safe_dump')
    yaml_serializer = SerializerPipeline(
        [
            yaml_preconf_stage,
            Stage(yaml, loads='safe_load', dumps='safe_dump'),
        ],
        [yaml_preconf_stage, _yaml_stage],
        is_binary=False,
    )  #: Complete YAML serializer
except ImportError as e:
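To illustrate how the refactored pieces fit together (a rough sketch, not code from this commit): a preconf stage produced by make_stage() handles the cattrs pre-serialization steps, and a Stage wrapping a dumps/loads pair handles the final format. This mirrors the json_serializer definition in the diff above, but with only the stdlib json module:

# Sketch of a custom pipeline built from the objects shown in this diff.
# make_stage() creates the cattrs preconf stage; Stage() wraps the final
# dumps/loads pair; SerializerPipeline runs them in order.
import json

from requests_cache.serializers.pipeline import SerializerPipeline, Stage
from requests_cache.serializers.preconf import make_stage

json_preconf = make_stage('cattr.preconf.json')          # pre-serialization steps for JSON
json_stage = Stage(dumps=json.dumps, loads=json.loads)   # final encoding step (stdlib json)
custom_json_serializer = SerializerPipeline([json_preconf, json_stage], is_binary=False)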
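Similarly, the signed pickle serializer can be passed to a session. This usage sketch is not part of the commit; it assumes the itsdangerous extra is installed and uses CachedSession's serializer argument as described in the project's serializer docs, with a hypothetical secret key:

# Responses are pickled and signed with the secret key on write;
# the signature is verified on read before unpickling.
from requests_cache import CachedSession
from requests_cache.serializers.preconf import safe_pickle_serializer

serializer = safe_pickle_serializer(secret_key='my-secret-key')  # hypothetical key
session = CachedSession('signed_cache', serializer=serializer)
session.get('https://example.com')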
