S3: Workaround boto bug to fix collectstatic issue #955

Merged 2 commits on Dec 15, 2020
7 changes: 4 additions & 3 deletions storages/backends/s3boto3.py
@@ -16,8 +16,8 @@

 from storages.base import BaseStorage
 from storages.utils import (
-    check_location, get_available_overwrite_name, lookup_env, safe_join,
-    setting,
+    NonCloseableBufferedReader, check_location, get_available_overwrite_name,
+    lookup_env, safe_join, setting,
 )

 try:
@@ -445,7 +445,8 @@ def _save(self, name, content):

         obj = self.bucket.Object(name)
         content.seek(0, os.SEEK_SET)
-        obj.upload_fileobj(content, ExtraArgs=params)
+        with NonCloseableBufferedReader(content) as reader:
+            obj.upload_fileobj(reader, ExtraArgs=params)
         return cleaned_name

     def delete(self, name):
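
Background on the change above (an illustrative note, not part of the diff): boto3's upload_fileobj hands the file object to s3transfer, which closes it once the transfer finishes (boto/s3transfer#80). Django may still need that file after save() returns, for example during collectstatic post-processing, so the premature close breaks the run. The sketch below simulates the pre-patch failure mode with io.BytesIO and a hypothetical fake_upload_fileobj standing in for the boto3 call:

import io

# Hypothetical stand-in for s3transfer's behaviour: it consumes the file
# object it is given and then closes it (see boto/s3transfer#80).
def fake_upload_fileobj(fileobj):
    fileobj.read()
    fileobj.close()

stream = io.BytesIO(b"static asset contents")
fake_upload_fileobj(stream)  # passing the stream directly, as _save did before this PR
assert stream.closed         # the caller's file object is now unusable
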
8 changes: 8 additions & 0 deletions storages/utils.py
@@ -1,3 +1,4 @@
+import io
 import os
 import posixpath

@@ -117,3 +118,10 @@ def get_available_overwrite_name(name, max_length):
             'allows sufficient "max_length".' % name
         )
     return os.path.join(dir_name, "{}{}".format(file_root, file_ext))
+
+
+# A buffered reader that does not actually close, workaround for
+# https://github.com/boto/s3transfer/issues/80#issuecomment-562356142
+class NonCloseableBufferedReader(io.BufferedReader):
+    def close(self):
+        self.flush()
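
The new helper in isolation (a minimal sketch, not taken from the PR): because close() only flushes, closing the wrapper, whether explicitly or by leaving a with block, leaves the wrapped stream open, which is exactly what the patched _save relies on:

import io

from storages.utils import NonCloseableBufferedReader

raw = io.BytesIO(b"static asset contents")

with NonCloseableBufferedReader(raw) as reader:
    reader.read()   # stand-in for the upload consuming the stream
    reader.close()  # what s3transfer does internally; here it only flushes

assert not raw.closed  # the underlying stream survives the "close"
raw.seek(0)            # and can be rewound and reused by the caller
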
46 changes: 26 additions & 20 deletions tests/test_s3boto3.py
@@ -22,6 +22,12 @@ def setUp(self):
         self.storage._connections.connection = mock.MagicMock()


+def assert_called_upload(mock_uploadobj, content, extra):
+    assert mock_uploadobj.call_count == 1
+    assert mock_uploadobj.call_args[0][0].raw == content
+    assert mock_uploadobj.call_args[1] == extra
+
+
 class S3Boto3StorageTests(S3Boto3TestCase):

     def test_clean_name(self):
@@ -107,12 +113,12 @@ def test_storage_save(self):
         self.storage.bucket.Object.assert_called_once_with(name)

         obj = self.storage.bucket.Object.return_value
-        obj.upload_fileobj.assert_called_with(
-            content,
-            ExtraArgs={
+        extra = {
+            'ExtraArgs': {
                 'ContentType': 'text/plain',
             }
-        )
+        }
+        assert_called_upload(obj.upload_fileobj, content, extra)

     def test_storage_save_with_default_acl(self):
         """
@@ -125,13 +131,13 @@ def test_storage_save_with_default_acl(self):
         self.storage.bucket.Object.assert_called_once_with(name)

         obj = self.storage.bucket.Object.return_value
-        obj.upload_fileobj.assert_called_with(
-            content,
-            ExtraArgs={
+        extra = {
+            'ExtraArgs': {
                 'ContentType': 'text/plain',
                 'ACL': 'private',
             }
-        )
+        }
+        assert_called_upload(obj.upload_fileobj, content, extra)

     def test_storage_object_parameters_not_overwritten_by_default(self):
         """
@@ -145,13 +151,13 @@ def test_storage_object_parameters_not_overwritten_by_default(self):
         self.storage.bucket.Object.assert_called_once_with(name)

         obj = self.storage.bucket.Object.return_value
-        obj.upload_fileobj.assert_called_with(
-            content,
-            ExtraArgs={
+        extra = {
+            'ExtraArgs': {
                 'ContentType': 'text/plain',
                 'ACL': 'private',
             }
-        )
+        }
+        assert_called_upload(obj.upload_fileobj, content, extra)

     def test_content_type(self):
         """
@@ -164,12 +170,12 @@ def test_content_type(self):
         self.storage.bucket.Object.assert_called_once_with(name)

         obj = self.storage.bucket.Object.return_value
-        obj.upload_fileobj.assert_called_with(
-            content,
-            ExtraArgs={
+        extra = {
+            'ExtraArgs': {
                 'ContentType': 'image/jpeg',
             }
-        )
+        }
+        assert_called_upload(obj.upload_fileobj, content, extra)

     def test_storage_save_gzipped(self):
         """
@@ -179,13 +185,13 @@ def test_storage_save_gzipped(self):
         content = ContentFile("I am gzip'd")
         self.storage.save(name, content)
         obj = self.storage.bucket.Object.return_value
-        obj.upload_fileobj.assert_called_with(
-            content,
-            ExtraArgs={
+        extra = {
+            'ExtraArgs': {
                 'ContentType': 'application/octet-stream',
                 'ContentEncoding': 'gzip',
             }
-        )
+        }
+        assert_called_upload(obj.upload_fileobj, content, extra)

     def test_storage_save_gzip(self):
         """
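
A note on the new assert_called_upload helper (explanatory, not part of the PR): _save now passes the wrapper rather than the content itself to upload_fileobj, and io.BufferedReader exposes the object it wraps as its raw attribute, which is why the tests compare call_args[0][0].raw with the original content. A minimal sketch of that relationship:

import io

from storages.utils import NonCloseableBufferedReader

content = io.BytesIO(b"example")
reader = NonCloseableBufferedReader(content)

# _save hands reader (not content) to upload_fileobj, so the tests reach
# the original file object through the reader's raw attribute.
assert reader.raw is content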