diff --git a/CHANGES b/CHANGES
index fd80092..942538a 100644
--- a/CHANGES
+++ b/CHANGES
@@ -1,6 +1,9 @@
-0.2.1 (2019-06-20)
+0.2.2
 ==================
 
+* Update the seek of closed file fix to work with django-storages 1.9.x.
+
+0.2.1 (2019-06-20)
+==================
 * Add boto3 file seek fix
 
 0.2.0 (2019-06-19)
diff --git a/cacheds3storage/__init__.py b/cacheds3storage/__init__.py
index 7bda12e..5e29ded 100644
--- a/cacheds3storage/__init__.py
+++ b/cacheds3storage/__init__.py
@@ -11,28 +11,31 @@ def __init__(self, *args, **kwargs):
         self.local_storage = get_storage_class(
             'compressor.storage.CompressorFileStorage')()
 
-    # Fix for "ValueError: seek of closed file" problem with boto3
-    # https://github.com/jschneier/django-storages/issues/382#issuecomment-377174808
-    def _save_content(self, obj, content, parameters):
+    # https://github.com/jschneier/django-storages/issues/382
+    # #issuecomment-592876060
+    def _save(self, name, content):
         """
-        We create a clone of the content file as when this is passed to boto3 it wrongly closes
-        the file upon upload where as the storage backend expects it to still be open
+        We create a clone of the content file because, when it is passed
+        to boto3, boto3 wrongly closes the file upon upload, whereas the
+        storage backend expects it to still be open
         """
         # Seek our content back to the start
         content.seek(0, os.SEEK_SET)
 
-        # Create a temporary file that will write to disk after a specified size
-        content_autoclose = SpooledTemporaryFile()
+        # Create a temporary file that will spill to disk after a
+        # certain size. This file will be automatically deleted when
+        # closed by boto3, or on exiting the `with` statement once
+        # boto3 is fixed upstream
+        with SpooledTemporaryFile() as content_autoclose:
 
-        # Write our original content into our copy that will be closed by boto3
-        content_autoclose.write(content.read())
+            # Write our original content into our copy that will be
+            # closed by boto3
+            content_autoclose.write(content.read())
 
-        # Upload the object which will auto close the content_autoclose instance
-        super(CachedS3BotoStorage, self)._save_content(obj, content_autoclose, parameters)
-
-        # Cleanup if this is fixed upstream our duplicate should always close
-        if not content_autoclose.closed:
-            content_autoclose.close()
+            # Upload the object, which will auto-close the
+            # content_autoclose instance
+            return super(CachedS3BotoStorage, self)._save(
+                name, content_autoclose)
 
     def save(self, name, content):
         name = super(CachedS3BotoStorage, self).save(name, content)
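
Note (not part of the patch): the new `_save` override follows the pattern from the linked django-storages issue comment: clone the incoming file into a SpooledTemporaryFile and hand the clone to boto3, so the caller's file object is never closed out from under the storage backend. Below is a minimal, standalone sketch of that pattern; the `upload` callable is hypothetical and stands in for the boto3-backed `super()._save()`.

import os
from tempfile import SpooledTemporaryFile


def save_via_clone(content, upload):
    # Rewind the caller's file so the copy starts from the beginning
    content.seek(0, os.SEEK_SET)

    # The clone lives in memory until it grows large, then spills to
    # disk; closing it (by `upload` or by the `with` block) deletes it
    with SpooledTemporaryFile() as content_autoclose:
        content_autoclose.write(content.read())
        content_autoclose.seek(0, os.SEEK_SET)

        # `upload` may close content_autoclose; `content` stays open
        return upload(content_autoclose)

After the call the original `content` is still open and seekable, which is what the surrounding `save()` relies on when it goes on to write the local copy via `local_storage`.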