Skip to content

Commit

Permalink
[gcloud] Do stronger retries when all files are immutable
Browse files Browse the repository at this point in the history
  • Loading branch information
mlazowik committed Mar 10, 2022
1 parent 851c512 commit 9567455
Show file tree
Hide file tree
Showing 2 changed files with 35 additions and 7 deletions.
7 changes: 7 additions & 0 deletions docs/backends/gcloud.rst
Original file line number Diff line number Diff line change
Expand Up @@ -221,6 +221,13 @@ Sometimes requests can get stuck, so it's better if the timeout is low, couple o
Timeouts will be automatically retried when using a `google-cloud-storage` version that includes
https://github.com/googleapis/python-storage/pull/727

``GS_ALL_FILES_IMMUTABLE`` (optional: default is ``False``)

When set to ``True``, requests that normally require the generation parameter in order to be retryable will always be retried.

This is only safe when every path can only ever hold one specific content, and there is no chance of a race
condition between an upload and a delete.

Usage
-----

Expand Down
35 changes: 28 additions & 7 deletions storages/backends/gcloud.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,9 @@
from google.cloud.exceptions import NotFound
from google.cloud.storage import Blob, Client
from google.cloud.storage.blob import _quote
from google.cloud.storage.retry import (
DEFAULT_RETRY, DEFAULT_RETRY_IF_GENERATION_SPECIFIED
)
except ImportError:
raise ImproperlyConfigured("Could not load Google Cloud Storage bindings.\n"
"See https://github.com/GoogleCloudPlatform/gcloud-python")
Expand Down Expand Up @@ -88,7 +91,8 @@ def close(self):
self.blob.upload_from_file(
self.file, rewind=True, content_type=self.mime_type,
predefined_acl=blob_params.get('acl', self._storage.default_acl),
timeout=self._storage.timeout)
timeout=self._storage.timeout,
retry=self._storage.retry_if_generation_specified_or_immutable)
self._file.close()
self._file = None

Expand Down Expand Up @@ -129,9 +133,17 @@ def get_default_settings(self):
# roll over.
"max_memory_size": setting('GS_MAX_MEMORY_SIZE', 0),
"blob_chunk_size": setting('GS_BLOB_CHUNK_SIZE'),
"timeout": setting('GS_TIMEOUT', 60)
"timeout": setting('GS_TIMEOUT', 60),
"all_files_immutable": setting('GS_ALL_FILES_IMMUTABLE', False),
}

@property
def retry_if_generation_specified_or_immutable(self):
    """Return the retry policy to use for mutating requests.

    Normally only requests that pin an object generation are retried
    (``DEFAULT_RETRY_IF_GENERATION_SPECIFIED``).  When the
    ``all_files_immutable`` setting is enabled, every path is assumed to
    only ever hold one content, so unconditional retries
    (``DEFAULT_RETRY``) are safe as well.
    """
    if not self.all_files_immutable:
        return DEFAULT_RETRY_IF_GENERATION_SPECIFIED
    return DEFAULT_RETRY

@property
def client(self):
if self._client is None:
Expand Down Expand Up @@ -188,10 +200,15 @@ def _save(self, name, content):
for prop, val in blob_params.items():
setattr(file_object.blob, prop, val)

file_object.blob.upload_from_file(content, rewind=True,
size=getattr(content, 'size', None),
timeout=self.timeout,
**upload_params)
file_object.blob.upload_from_file(
content,
rewind=True,
size=getattr(content, 'size', None),
timeout=self.timeout,
retry=self.retry_if_generation_specified_or_immutable,
**upload_params
)

return cleaned_name

def get_object_parameters(self, name):
Expand All @@ -214,7 +231,11 @@ def get_object_parameters(self, name):
def delete(self, name):
    """Delete the object stored under ``name`` from the bucket.

    The name is cleaned and normalized before the request.  A missing
    object is treated as already deleted, so ``NotFound`` is swallowed.
    """
    blob_name = self._normalize_name(clean_name(name))
    try:
        self.bucket.delete_blob(
            blob_name,
            retry=self.retry_if_generation_specified_or_immutable,
            timeout=self.timeout,
        )
    except NotFound:
        # Deleting a nonexistent object is a no-op.
        pass

Expand Down

0 comments on commit 9567455

Please sign in to comment.