Skip to content

Commit

Permalink
Revert "[general] Update default size when using `SpooledTemporaryFile` (#1359)" (#1377)
Browse files Browse the repository at this point in the history

This reverts commit 5111199.
  • Loading branch information
jschneier committed Apr 21, 2024
1 parent 68ef6ba commit 51877a4
Show file tree
Hide file tree
Showing 8 changed files with 12 additions and 29 deletions.
2 changes: 1 addition & 1 deletion docs/backends/amazon-S3.rst
Expand Up @@ -91,7 +91,7 @@ Settings

``max_memory_size`` or ``AWS_S3_MAX_MEMORY_SIZE``

Default: ``FILE_UPLOAD_MAX_MEMORY_SIZE``
Default: ``0``, i.e. do not roll over

The maximum amount of memory (in bytes) a file can take up before being rolled over
into a temporary file on disk.
Expand Down
4 changes: 2 additions & 2 deletions docs/backends/azure.rst
Expand Up @@ -123,9 +123,9 @@ Settings

Global connection timeout in seconds.

``max_memory_size`` or ``AZURE_BLOB_MAX_MEMORY_SIZE``
``max_memory`` or ``AZURE_BLOB_MAX_MEMORY_SIZE``

Default: ``FILE_UPLOAD_MAX_MEMORY_SIZE``
Default: ``2*1024*1024``, i.e. ``2MB``

Maximum memory used by a downloaded file before dumping it to disk in bytes.

Expand Down
5 changes: 0 additions & 5 deletions docs/backends/dropbox.rst
Expand Up @@ -120,11 +120,6 @@ Settings

Sets the Dropbox WriteMode strategy. Read more in the `official docs`_.

``max_memory_size`` or ``DROPBOX_MAX_MEMORY_SIZE``

Default: ``FILE_UPLOAD_MAX_MEMORY_SIZE``

Maximum memory used by a downloaded file before dumping it to disk in bytes.

.. _`tutorial`: https://www.dropbox.com/developers/documentation/python#tutorial
.. _`Dropbox SDK for Python`: https://www.dropbox.com/developers/documentation/python#tutorial
Expand Down
4 changes: 2 additions & 2 deletions docs/backends/gcloud.rst
Expand Up @@ -147,10 +147,10 @@ Settings

``max_memory_size`` or ``GS_MAX_MEMORY_SIZE``

default: ``FILE_UPLOAD_MAX_MEMORY_SIZE``
default: ``0``, i.e. do not roll over

The maximum amount of memory a returned file can take up (in bytes) before being
rolled over into a temporary file on disk.
rolled over into a temporary file on disk. Default is 0: Do not roll over.

``blob_chunk_size`` or ``GS_BLOB_CHUNK_SIZE``

Expand Down
6 changes: 2 additions & 4 deletions storages/backends/azure_storage.py
Expand Up @@ -40,7 +40,7 @@ def _get_file(self):
file = SpooledTemporaryFile(
max_size=self._storage.max_memory_size,
suffix=".AzureStorageFile",
dir=setting("FILE_UPLOAD_TEMP_DIR"),
dir=setting("FILE_UPLOAD_TEMP_DIR", None),
)

if "r" in self._mode or "a" in self._mode:
Expand Down Expand Up @@ -141,9 +141,7 @@ def get_default_settings(self):
"azure_ssl": setting("AZURE_SSL", True),
"upload_max_conn": setting("AZURE_UPLOAD_MAX_CONN", 2),
"timeout": setting("AZURE_CONNECTION_TIMEOUT_SECS", 20),
"max_memory_size": setting(
"AZURE_BLOB_MAX_MEMORY_SIZE", setting("FILE_UPLOAD_MAX_MEMORY_SIZE")
),
"max_memory_size": setting("AZURE_BLOB_MAX_MEMORY_SIZE", 2 * 1024 * 1024),
"expiration_secs": setting("AZURE_URL_EXPIRATION_SECS"),
"overwrite_files": setting("AZURE_OVERWRITE_FILES", False),
"location": setting("AZURE_LOCATION", ""),
Expand Down
9 changes: 1 addition & 8 deletions storages/backends/dropbox.py
Expand Up @@ -47,11 +47,7 @@ def __init__(self, name, storage):

def _get_file(self):
if self._file is None:
self._file = SpooledTemporaryFile(
max_size=self._storage.max_memory_size,
suffix=".DropboxFile",
dir=setting("FILE_UPLOAD_TEMP_DIR"),
)
self._file = SpooledTemporaryFile()
# As dropbox==9.3.0, the client returns a tuple
# (dropbox.files.FileMetadata, requests.models.Response)
file_metadata, response = self._storage.client.files_download(self.name)
Expand Down Expand Up @@ -123,9 +119,6 @@ def get_default_settings(self):
"oauth2_refresh_token": setting("DROPBOX_OAUTH2_REFRESH_TOKEN"),
"timeout": setting("DROPBOX_TIMEOUT", _DEFAULT_TIMEOUT),
"write_mode": setting("DROPBOX_WRITE_MODE", _DEFAULT_MODE),
"max_memory_size": setting(
"DROPBOX_MAX_MEMORY_SIZE", setting("FILE_UPLOAD_MAX_MEMORY_SIZE")
),
}

def _full_path(self, name):
Expand Down
7 changes: 3 additions & 4 deletions storages/backends/gcloud.py
Expand Up @@ -138,10 +138,9 @@ def get_default_settings(self):
"file_overwrite": setting("GS_FILE_OVERWRITE", True),
"object_parameters": setting("GS_OBJECT_PARAMETERS", {}),
# The max amount of memory a returned file can take up before being
# rolled over into a temporary file on disk.
"max_memory_size": setting(
"GS_MAX_MEMORY_SIZE", setting("FILE_UPLOAD_MAX_MEMORY_SIZE")
),
# rolled over into a temporary file on disk. Default is 0: Do not
# roll over.
"max_memory_size": setting("GS_MAX_MEMORY_SIZE", 0),
"blob_chunk_size": setting("GS_BLOB_CHUNK_SIZE"),
}

Expand Down
4 changes: 1 addition & 3 deletions storages/backends/s3.py
Expand Up @@ -403,9 +403,7 @@ def get_default_settings(self):
"region_name": setting("AWS_S3_REGION_NAME"),
"use_ssl": setting("AWS_S3_USE_SSL", True),
"verify": setting("AWS_S3_VERIFY", None),
"max_memory_size": setting(
"AWS_S3_MAX_MEMORY_SIZE", setting("FILE_UPLOAD_MAX_MEMORY_SIZE")
),
"max_memory_size": setting("AWS_S3_MAX_MEMORY_SIZE", 0),
"default_acl": setting("AWS_DEFAULT_ACL", None),
"use_threads": setting("AWS_S3_USE_THREADS", True),
"transfer_config": setting("AWS_S3_TRANSFER_CONFIG", None),
Expand Down

0 comments on commit 51877a4

Please sign in to comment.