tests: add retry conf s7 resumable upload test cases #720

Merged: 4 commits, merged Mar 9, 2022
Changes from 1 commit
tests/conformance/retry_strategy_test_data.json (16 changes: 14 additions & 2 deletions)
@@ -243,8 +243,20 @@
     {
       "id": 7,
       "description": "resumable_uploads_handle_complex_retries",
-      "cases": [],
-      "methods": [],
+      "cases": [
+        {
+          "instructions": ["return-reset-connection", "return-503"]
+        },
+        {
+          "instructions": ["return-503-after-256K"]
+        },
+        {
+          "instructions": ["return-503-after-12288K"]
+        }
+      ],
+      "methods": [
+        {"name": "storage.objects.insert", "group": "storage.resumable.upload", "resources": ["BUCKET"]}
+      ],
       "preconditionProvided": true,
       "expectSuccess": true
     },
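Note on the new cases: the instruction strings are fault-injection directives understood by the Storage testbench. "return-reset-connection" and "return-503" fail the request outright, while "return-503-after-256K" and "return-503-after-12288K" let the emulator accept 256 KiB or 12288 KiB of the resumable stream before failing, so the client has to query the persisted offset and resume mid-upload. A minimal sketch of how such a case is registered against the emulator's Retry Test API (assuming the googleapis/storage-testbench emulator on localhost:9000; these helper names are illustrative, not the ones in test_conformance.py):

import requests

_HOST = "http://localhost:9000"  # assumed testbench endpoint

def create_retry_test(instructions):
    # Register fault-injection instructions; the testbench returns a test id.
    resp = requests.post(
        _HOST + "/retry_test",
        json={"instructions": {"storage.objects.insert": instructions}},
    )
    resp.raise_for_status()
    return resp.json()["id"]

def retry_test_completed(test_id):
    # The emulator reports "completed" once every instruction has fired.
    resp = requests.get(_HOST + "/retry_test/" + test_id)
    resp.raise_for_status()
    return resp.json()["completed"]

# Client requests carrying this header are subject to the registered faults.
test_id = create_retry_test(["return-503-after-256K"])
headers = {"x-retry-test-id": test_id}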
tests/conformance/test_conformance.py (69 changes: 62 additions & 7 deletions)
@@ -54,6 +54,7 @@

 _STRING_CONTENT = "hello world"
 _BYTE_CONTENT = b"12345678"
+_RESUMABLE_UPLOAD_CHUNK_SIZE = 4 * 1024 * 1024


 ########################################################################################################################################
@@ -461,7 +462,7 @@ def blob_upload_from_string(client, _preconditions, **resources):
     bucket = resources.get("bucket")
     _, data = resources.get("file_data")
     blob = client.bucket(bucket.name).blob(uuid.uuid4().hex)
-    blob.chunk_size = 4 * 1024 * 1024
+    blob.chunk_size = _RESUMABLE_UPLOAD_CHUNK_SIZE
     if _preconditions:
         blob.upload_from_string(data, if_generation_match=0)
     else:
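Why the constant matters: in google-cloud-storage, a small upload with no chunk_size set goes out as a single multipart POST, while setting blob.chunk_size switches the client to a resumable session that sends the payload in chunk-sized requests. Pinning 4 MiB keeps these helpers on the resumable path that scenario s7 exercises. A hedged sketch of the two paths (bucket and object names are hypothetical):

from google.cloud import storage

client = storage.Client()
blob = client.bucket("my-bucket").blob("my-object")  # hypothetical names

# chunk_size unset + small payload: a single multipart POST
# (the "storage.objects.insert" group in the retry tests).
blob.upload_from_string(b"hello world")

# chunk_size set: the client opens a resumable session and sends the data
# in 4 MiB requests ("storage.resumable.upload"), which the s7 faults can
# interrupt mid-stream.
blob.chunk_size = 4 * 1024 * 1024
blob.upload_from_string(b"x" * (13 * 1024 * 1024))

The *_multipart variants added further down leave chunk_size unset for exactly this reason: they belong to the plain storage.objects.insert group.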
@@ -471,26 +472,67 @@ def blob_upload_from_string(client, _preconditions, **resources):


 def blob_upload_from_file(client, _preconditions, **resources):
     bucket = resources.get("bucket")
-    blob = client.bucket(bucket.name).blob(uuid.uuid4().hex)
+    file, data = resources.get("file_data")
+    file_blob = client.bucket(bucket.name).blob(file.name)
+    upload_blob = client.bucket(bucket.name).blob(uuid.uuid4().hex)
+    upload_blob.chunk_size = _RESUMABLE_UPLOAD_CHUNK_SIZE
+
     with tempfile.NamedTemporaryFile() as temp_f:
+        # Create a named temporary file with payload.
+        with open(temp_f.name, "wb") as file_obj:
+            client.download_blob_to_file(file_blob, file_obj)
+        # Upload the temporary file and assert data integrity.
         if _preconditions:
-            blob.upload_from_file(temp_f, if_generation_match=0)
+            upload_blob.upload_from_file(temp_f, if_generation_match=0)
         else:
-            blob.upload_from_file(temp_f)
+            upload_blob.upload_from_file(temp_f)
+
+    upload_blob.reload()
+    assert upload_blob.size == len(data)


 def blob_upload_from_filename(client, _preconditions, **resources):
-    bucket = resources.get("bucket")
-    blob = client.bucket(bucket.name).blob(uuid.uuid4().hex)
-    blob.chunk_size = _RESUMABLE_UPLOAD_CHUNK_SIZE
+    bucket = resources.get("bucket")
+    file, data = resources.get("file_data")
+    file_blob = client.bucket(bucket.name).blob(file.name)
+    upload_blob = client.bucket(bucket.name).blob(uuid.uuid4().hex)
+    upload_blob.chunk_size = _RESUMABLE_UPLOAD_CHUNK_SIZE
+
     with tempfile.NamedTemporaryFile() as temp_f:
+        # Create a named temporary file with payload.
+        with open(temp_f.name, "wb") as file_obj:
+            client.download_blob_to_file(file_blob, file_obj)
+        # Upload the temporary file and assert data integrity.
         if _preconditions:
-            blob.upload_from_filename(temp_f.name, if_generation_match=0)
+            upload_blob.upload_from_filename(temp_f.name, if_generation_match=0)
         else:
-            blob.upload_from_filename(temp_f.name)
+            upload_blob.upload_from_filename(temp_f.name)
+
+    upload_blob.reload()
+    assert upload_blob.size == len(data)


 def blobwriter_write(client, _preconditions, **resources):
+    bucket = resources.get("bucket")
+    _, data = resources.get("file_data")
+    blob = client.bucket(bucket.name).blob(uuid.uuid4().hex)
+    if _preconditions:
+        with blob.open(
+            "w", chunk_size=_RESUMABLE_UPLOAD_CHUNK_SIZE, if_generation_match=0
+        ) as writer:
+            writer.write(data)
+    else:
+        with blob.open("w", chunk_size=_RESUMABLE_UPLOAD_CHUNK_SIZE) as writer:
+            writer.write(data)
+    blob.reload()
+    assert blob.size == len(data)
+
+
+def blobwriter_write_multipart(client, _preconditions, **resources):
     chunk_size = 256 * 1024
     bucket = resources.get("bucket")
     blob = client.bucket(bucket.name).blob(uuid.uuid4().hex)
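The rewritten helpers above all follow one pattern: stage the conformance fixture's payload in a temporary file, upload it under a fresh object name, then reload() the blob and compare its server-side size to the source length, so a retry that silently truncates data fails the test. The fault offsets also line up with the 4 MiB chunk size: 256 KiB lands inside the first chunk, while 12288 KiB is exactly three full chunks, forcing a resume after committed chunks. A condensed sketch of the same pattern using only public client API (names are hypothetical):

import uuid
from google.cloud import storage

_CHUNK = 4 * 1024 * 1024

def upload_roundtrip(client: storage.Client, bucket_name: str, source_name: str):
    bucket = client.bucket(bucket_name)
    data = bucket.blob(source_name).download_as_bytes()

    dest = bucket.blob(uuid.uuid4().hex)
    dest.chunk_size = _CHUNK  # force the resumable path
    dest.upload_from_string(data)

    # Fetch server-side metadata and verify nothing was truncated by a retry.
    dest.reload()
    assert dest.size == len(data)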
@@ -502,6 +544,15 @@ def blobwriter_write(client, _preconditions, **resources):
             writer.write(_BYTE_CONTENT)


+def blob_upload_from_string_multipart(client, _preconditions, **resources):
+    bucket = resources.get("bucket")
+    blob = client.bucket(bucket.name).blob(uuid.uuid4().hex)
+    if _preconditions:
+        blob.upload_from_string(_STRING_CONTENT, if_generation_match=0)
+    else:
+        blob.upload_from_string(_STRING_CONTENT)
+
+
 def blob_create_resumable_upload_session(client, _preconditions, **resources):
     bucket = resources.get("bucket")
     blob = client.bucket(bucket.name).blob(uuid.uuid4().hex)
@@ -745,11 +796,15 @@ def object_acl_clear(client, _preconditions, **resources):
         bucket_rename_blob,
     ],
     "storage.objects.insert": [
+        blob_upload_from_string_multipart,
+        blobwriter_write_multipart,
         blob_create_resumable_upload_session,
     ],
     "storage.resumable.upload": [
         blob_upload_from_string,
         blob_upload_from_file,
+        blob_upload_from_filename,
+        blobwriter_write,
         blob_create_resumable_upload_session,
     ],
     "storage.objects.patch": [
         blob_patch,
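This mapping is what ties retry_strategy_test_data.json to the library: for each case in a scenario, the runner registers the case's instructions with the emulator and then invokes every helper listed under the case's method group. A schematic of that flow for scenario 7, reusing the hypothetical helpers sketched earlier (this illustrates the loop, not the repo's actual runner):

def run_scenario_7(method_mapping, client_factory, resources):
    # The three cases added to retry_strategy_test_data.json above.
    cases = [
        ["return-reset-connection", "return-503"],
        ["return-503-after-256K"],
        ["return-503-after-12288K"],
    ]
    for instructions in cases:
        for func in method_mapping["storage.resumable.upload"]:
            test_id = create_retry_test(instructions)
            # client_factory is assumed to return a client whose requests
            # carry the x-retry-test-id header registered above.
            client = client_factory(test_id)
            # "expectSuccess": true and "preconditionProvided": true: the
            # call must complete despite the injected faults.
            func(client, True, **resources)
            assert retry_test_completed(test_id)

A retry test left incomplete after the call means the client never reached the later instructions, which usually indicates the retry policy gave up too early.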