From 3623b589a98c99637f28eb28e2fde5b389c692c3 Mon Sep 17 00:00:00 2001
From: Josh Schneier
Date: Sun, 15 Nov 2020 19:26:46 -0500
Subject: [PATCH] S3: Workaround boto bug to fix collectstatic issue

---
 storages/backends/s3boto3.py |  5 ++--
 storages/utils.py            |  8 ++++++
 tests/test_s3boto3.py        | 47 +++++++++++++++++++++---------------
 3 files changed, 38 insertions(+), 22 deletions(-)

diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py
index 38a52df13..95c49b3e4 100644
--- a/storages/backends/s3boto3.py
+++ b/storages/backends/s3boto3.py
@@ -17,7 +17,7 @@
 from storages.base import BaseStorage
 from storages.utils import (
     check_location, get_available_overwrite_name, lookup_env, safe_join,
-    setting,
+    setting, NonCloseableBufferedReader,
 )
 
 try:
@@ -445,7 +445,8 @@ def _save(self, name, content):
 
         obj = self.bucket.Object(name)
         content.seek(0, os.SEEK_SET)
-        obj.upload_fileobj(content, ExtraArgs=params)
+        with NonCloseableBufferedReader(content) as reader:
+            obj.upload_fileobj(reader, ExtraArgs=params)
         return cleaned_name
 
     def delete(self, name):
diff --git a/storages/utils.py b/storages/utils.py
index d547a43f2..cc3b40b99 100644
--- a/storages/utils.py
+++ b/storages/utils.py
@@ -1,3 +1,4 @@
+import io
 import os
 import posixpath
 
@@ -117,3 +118,10 @@ def get_available_overwrite_name(name, max_length):
             'allows sufficient "max_length".' % name
         )
     return os.path.join(dir_name, "{}{}".format(file_root, file_ext))
+
+
+# A buffered reader that does not actually close, workaround for
+# https://github.com/boto/s3transfer/issues/80#issuecomment-562356142
+class NonCloseableBufferedReader(io.BufferedReader):
+    def close(self):
+        self.flush()
diff --git a/tests/test_s3boto3.py b/tests/test_s3boto3.py
index f4415c94e..c81378234 100644
--- a/tests/test_s3boto3.py
+++ b/tests/test_s3boto3.py
@@ -13,6 +13,7 @@
 from django.test import TestCase, override_settings
 from django.utils.timezone import is_aware, utc
 
+from storages import utils
 from storages.backends import s3boto3
 
 
@@ -22,6 +23,12 @@ def setUp(self):
         self.storage._connections.connection = mock.MagicMock()
 
 
+def assert_called_upload(mock_uploadobj, content, extra):
+    assert mock_uploadobj.call_count == 1
+    assert mock_uploadobj.call_args[0][0].raw == content
+    assert mock_uploadobj.call_args[1] == extra
+
+
 class S3Boto3StorageTests(S3Boto3TestCase):
 
     def test_clean_name(self):
@@ -107,12 +114,12 @@ def test_storage_save(self):
         self.storage.bucket.Object.assert_called_once_with(name)
 
         obj = self.storage.bucket.Object.return_value
-        obj.upload_fileobj.assert_called_with(
-            content,
-            ExtraArgs={
+        extra = {
+            'ExtraArgs': {
                 'ContentType': 'text/plain',
             }
-        )
+        }
+        assert_called_upload(obj.upload_fileobj, content, extra)
 
     def test_storage_save_with_default_acl(self):
         """
@@ -125,13 +132,13 @@ def test_storage_save_with_default_acl(self):
         self.storage.bucket.Object.assert_called_once_with(name)
 
         obj = self.storage.bucket.Object.return_value
-        obj.upload_fileobj.assert_called_with(
-            content,
-            ExtraArgs={
+        extra = {
+            'ExtraArgs': {
                 'ContentType': 'text/plain',
                 'ACL': 'private',
             }
-        )
+        }
+        assert_called_upload(obj.upload_fileobj, content, extra)
 
     def test_storage_object_parameters_not_overwritten_by_default(self):
         """
@@ -145,13 +152,13 @@ def test_storage_object_parameters_not_overwritten_by_default(self):
         self.storage.bucket.Object.assert_called_once_with(name)
 
         obj = self.storage.bucket.Object.return_value
-        obj.upload_fileobj.assert_called_with(
-            content,
-            ExtraArgs={
+        extra = {
+            'ExtraArgs': {
                 'ContentType': 'text/plain',
                 'ACL': 'private',
             }
-        )
+        }
+        assert_called_upload(obj.upload_fileobj, content, extra)
 
     def test_content_type(self):
         """
@@ -164,12 +171,12 @@ def test_content_type(self):
         self.storage.bucket.Object.assert_called_once_with(name)
 
         obj = self.storage.bucket.Object.return_value
-        obj.upload_fileobj.assert_called_with(
-            content,
-            ExtraArgs={
+        extra = {
+            'ExtraArgs': {
                 'ContentType': 'image/jpeg',
             }
-        )
+        }
+        assert_called_upload(obj.upload_fileobj, content, extra)
 
     def test_storage_save_gzipped(self):
         """
@@ -179,13 +186,13 @@ def test_storage_save_gzipped(self):
         content = ContentFile("I am gzip'd")
         self.storage.save(name, content)
         obj = self.storage.bucket.Object.return_value
-        obj.upload_fileobj.assert_called_with(
-            content,
-            ExtraArgs={
+        extra = {
+            'ExtraArgs': {
                 'ContentType': 'application/octet-stream',
                 'ContentEncoding': 'gzip',
             }
-        )
+        }
+        assert_called_upload(obj.upload_fileobj, content, extra)
 
     def test_storage_save_gzip(self):
         """