tests: add retry conf s7 resumable upload test cases (#720)
* tests: add retry conf s7 resumable upload test cases

* adjust fixture and chunk size from design discussion
cojenco committed Mar 9, 2022
1 parent c7bf615 commit 2cb0892
Showing 3 changed files with 86 additions and 13 deletions.
14 changes: 10 additions & 4 deletions tests/conformance/conftest.py
@@ -13,6 +13,7 @@
# limitations under the License.

import os
import random
import uuid

import pytest
@@ -33,16 +34,21 @@
)
_CONF_TEST_PUBSUB_TOPIC_NAME = "my-topic-name"


"""Create content payload in different sizes."""


def _create_block(desired_kib):
line = "abc123XYZ" * 14 + "!" + "\n"
return 1024 * int(desired_kib / len(line)) * line
line = "abcdefXYZ123456789ADDINGrandom#" # len(line) = 31
multiplier = int(desired_kib / (len(line) + 1))
lines = "".join(
line + str(random.randint(0, 9)) for _ in range(multiplier)
) # append a random single-digit integer to each line
return 1024 * lines


_STRING_CONTENT = "hello world"
_SIZE_16MB = 16384 # 16*1024 KiB
_SIZE_9MB = 9216 # 9*1024 KiB
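
As a quick sanity check (illustrative only, not part of this commit): each generated line is 32 characters (the 31-character template plus one random digit), so _create_block(_SIZE_9MB) builds int(9216 / 32) = 288 lines and repeats them 1024 times, i.e. exactly 9 MiB of string content.

# Illustrative size check, assuming the helper and constant defined above.
payload = _create_block(_SIZE_9MB)
assert len(payload) == 9216 * 1024  # 9 MiB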


########################################################################################################################################
@@ -115,7 +121,7 @@ def hmac_key(client):
@pytest.fixture
def file_data(client, bucket):
blob = client.bucket(bucket.name).blob(uuid.uuid4().hex)
payload = _create_block(_SIZE_16MB)
payload = _create_block(_SIZE_9MB)
blob.upload_from_string(payload)
yield blob, payload
try:
16 changes: 14 additions & 2 deletions tests/conformance/retry_strategy_test_data.json
@@ -243,8 +243,20 @@
{
"id": 7,
"description": "resumable_uploads_handle_complex_retries",
"cases": [],
"methods": [],
"cases": [
{
"instructions": ["return-reset-connection", "return-503"]
},
{
"instructions": ["return-503-after-256K"]
},
{
"instructions": ["return-503-after-8192K"]
}
],
"methods": [
{"name": "storage.objects.insert", "group": "storage.resumable.upload", "resources": ["BUCKET"]}
],
"preconditionProvided": true,
"expectSuccess": true
},
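
For context, the conformance harness registers each case's instructions with the locally running storage testbench before invoking the library method. A rough sketch of that registration step (assuming the testbench's documented /retry_test endpoint and x-retry-test-id header; this is not code from this commit):

import requests

# Hypothetical sketch: register the first scenario-7 case with a local
# storage testbench instance (address assumed) and capture the test id.
host = "http://localhost:9000"
resp = requests.post(
    host + "/retry_test",
    json={
        "instructions": {
            "storage.objects.insert": ["return-reset-connection", "return-503"]
        }
    },
)
retry_test_id = resp.json()["id"]
# Subsequent requests from the client under test carry this id in the
# "x-retry-test-id" header so the testbench injects the configured faults.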
69 changes: 62 additions & 7 deletions tests/conformance/test_conformance.py
@@ -54,6 +54,7 @@

_STRING_CONTENT = "hello world"
_BYTE_CONTENT = b"12345678"
_RESUMABLE_UPLOAD_CHUNK_SIZE = 2 * 1024 * 1024
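
A back-of-the-envelope check of how this chunk size lines up with the 9 MiB fixture payload and the scenario 7 fault offsets (a sketch of the reasoning only, not part of the test code):

# Illustrative arithmetic only.
payload_bytes = 9216 * 1024                    # 9 MiB fixture payload (_SIZE_9MB)
chunk_bytes = 2 * 1024 * 1024                  # _RESUMABLE_UPLOAD_CHUNK_SIZE
num_chunks = -(-payload_bytes // chunk_bytes)  # ceiling division -> 5 chunks
fault_offsets = [256 * 1024, 8192 * 1024]      # "return-503-after-256K" / "-8192K"
# Both fault offsets fall strictly inside the payload, so the 503 is
# injected mid-upload and the resumable retry path is exercised.
assert all(0 < offset < payload_bytes for offset in fault_offsets)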


########################################################################################################################################
@@ -461,7 +462,7 @@ def blob_upload_from_string(client, _preconditions, **resources):
bucket = resources.get("bucket")
_, data = resources.get("file_data")
blob = client.bucket(bucket.name).blob(uuid.uuid4().hex)
blob.chunk_size = 4 * 1024 * 1024
blob.chunk_size = _RESUMABLE_UPLOAD_CHUNK_SIZE
if _preconditions:
blob.upload_from_string(data, if_generation_match=0)
else:
@@ -471,26 +472,67 @@ def blob_upload_from_string(client, _preconditions, **resources):

def blob_upload_from_file(client, _preconditions, **resources):
bucket = resources.get("bucket")
blob = client.bucket(bucket.name).blob(uuid.uuid4().hex)
file, data = resources.get("file_data")
file_blob = client.bucket(bucket.name).blob(file.name)
upload_blob = client.bucket(bucket.name).blob(uuid.uuid4().hex)
upload_blob.chunk_size = _RESUMABLE_UPLOAD_CHUNK_SIZE

with tempfile.NamedTemporaryFile() as temp_f:
# Create a named temporary file with payload.
with open(temp_f.name, "wb") as file_obj:
client.download_blob_to_file(file_blob, file_obj)
# Upload the temporary file and assert data integrity.
if _preconditions:
blob.upload_from_file(temp_f, if_generation_match=0)
upload_blob.upload_from_file(temp_f, if_generation_match=0)
else:
blob.upload_from_file(temp_f)
upload_blob.upload_from_file(temp_f)

upload_blob.reload()
assert upload_blob.size == len(data)


def blob_upload_from_filename(client, _preconditions, **resources):
bucket = resources.get("bucket")
blob = client.bucket(bucket.name).blob(uuid.uuid4().hex)
blob.chunk_size = _RESUMABLE_UPLOAD_CHUNK_SIZE

bucket = resources.get("bucket")
file, data = resources.get("file_data")
file_blob = client.bucket(bucket.name).blob(file.name)
upload_blob = client.bucket(bucket.name).blob(uuid.uuid4().hex)
upload_blob.chunk_size = _RESUMABLE_UPLOAD_CHUNK_SIZE

with tempfile.NamedTemporaryFile() as temp_f:
# Create a named temporary file with payload.
with open(temp_f.name, "wb") as file_obj:
client.download_blob_to_file(file_blob, file_obj)
# Upload the temporary file and assert data integrity.
if _preconditions:
blob.upload_from_filename(temp_f.name, if_generation_match=0)
upload_blob.upload_from_filename(temp_f.name, if_generation_match=0)
else:
blob.upload_from_filename(temp_f.name)
upload_blob.upload_from_filename(temp_f.name)

upload_blob.reload()
assert upload_blob.size == len(data)


def blobwriter_write(client, _preconditions, **resources):
bucket = resources.get("bucket")
_, data = resources.get("file_data")
blob = client.bucket(bucket.name).blob(uuid.uuid4().hex)
if _preconditions:
with blob.open(
"w", chunk_size=_RESUMABLE_UPLOAD_CHUNK_SIZE, if_generation_match=0
) as writer:
writer.write(data)
else:
with blob.open("w", chunk_size=_RESUMABLE_UPLOAD_CHUNK_SIZE) as writer:
writer.write(data)
blob.reload()
assert blob.size == len(data)


def blobwriter_write_multipart(client, _preconditions, **resources):
chunk_size = 256 * 1024
bucket = resources.get("bucket")
blob = client.bucket(bucket.name).blob(uuid.uuid4().hex)
@@ -502,6 +544,15 @@ def blobwriter_write(client, _preconditions, **resources):
writer.write(_BYTE_CONTENT)


def blob_upload_from_string_multipart(client, _preconditions, **resources):
bucket = resources.get("bucket")
blob = client.bucket(bucket.name).blob(uuid.uuid4().hex)
if _preconditions:
blob.upload_from_string(_STRING_CONTENT, if_generation_match=0)
else:
blob.upload_from_string(_STRING_CONTENT)


def blob_create_resumable_upload_session(client, _preconditions, **resources):
bucket = resources.get("bucket")
blob = client.bucket(bucket.name).blob(uuid.uuid4().hex)
@@ -745,11 +796,15 @@ def object_acl_clear(client, _preconditions, **resources):
bucket_rename_blob,
],
"storage.objects.insert": [
blob_upload_from_string_multipart,
blobwriter_write_multipart,
blob_create_resumable_upload_session,
],
"storage.resumable.upload": [
blob_upload_from_string,
blob_upload_from_file,
blob_upload_from_filename,
blobwriter_write,
blob_create_resumable_upload_session,
],
"storage.objects.patch": [
blob_patch,