From 093e3c27a43c22b832da86c6853116b7bb1d28fe Mon Sep 17 00:00:00 2001 From: Mariatta Wijaya Date: Thu, 17 Mar 2022 02:40:10 -0700 Subject: [PATCH 01/15] chore: Change the Codeowner to cloud-native-db-dpes (#288) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Change the Codeowner to cloud-native-db-dpes * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .github/CODEOWNERS | 8 ++++---- .repo-metadata.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 6116b837..cac51240 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -5,8 +5,8 @@ # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax # Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. -# @googleapis/yoshi-python @googleapis/firestore-dpe are the default owners for changes in this repo -* @googleapis/yoshi-python @googleapis/firestore-dpe +# @googleapis/yoshi-python @googleapis/cloud-native-db-dpes are the default owners for changes in this repo +* @googleapis/yoshi-python @googleapis/cloud-native-db-dpes -# @googleapis/python-samples-reviewers @googleapis/firestore-dpe are the default owners for samples changes -/samples/ @googleapis/python-samples-reviewers @googleapis/firestore-dpe +# @googleapis/python-samples-reviewers @googleapis/cloud-native-db-dpes are the default owners for samples changes +/samples/ @googleapis/python-samples-reviewers @googleapis/cloud-native-db-dpes diff --git a/.repo-metadata.json b/.repo-metadata.json index a5bf20b2..44c2f180 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -11,6 +11,6 @@ "distribution_name": "google-cloud-datastore", "api_id": "datastore.googleapis.com", "default_version": "v1", - "codeowner_team": "@googleapis/firestore-dpe", + "codeowner_team": "@googleapis/cloud-native-db-dpes", "api_shortname": "datastore" } From 34e85cae5a1df0df9ffec0ce014073afb07b0f31 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 28 Mar 2022 23:56:10 +0000 Subject: [PATCH 02/15] chore(python): use black==22.3.0 (#290) Source-Link: https://github.com/googleapis/synthtool/commit/6fab84af09f2cf89a031fd8671d1def6b2931b11 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe --- .github/.OwlBot.lock.yaml | 2 +- docs/conf.py | 5 +- google/cloud/datastore/_http.py | 15 +- google/cloud/datastore/helpers.py | 5 +- google/cloud/datastore/key.py | 8 +- .../services/datastore_admin/async_client.py | 47 ++- .../services/datastore_admin/client.py | 88 ++++- .../datastore_admin/transports/base.py | 22 +- .../datastore_admin/transports/grpc.py | 3 +- .../types/datastore_admin.py | 248 ++++++++++--- .../cloud/datastore_admin_v1/types/index.py | 51 ++- .../datastore_admin_v1/types/migration.py | 25 +- .../services/datastore/async_client.py | 53 ++- .../datastore_v1/services/datastore/client.py | 94 ++++- .../services/datastore/transports/base.py | 26 +- .../services/datastore/transports/grpc.py | 3 +- google/cloud/datastore_v1/types/datastore.py | 234 ++++++++++--- google/cloud/datastore_v1/types/entity.py | 140 ++++++-- google/cloud/datastore_v1/types/query.py | 197 +++++++++-- noxfile.py | 9 +- 
tests/system/conftest.py | 4 +- tests/system/test_allocate_reserve_ids.py | 3 +- tests/system/test_put.py | 21 +- tests/system/test_query.py | 21 +- tests/system/test_transaction.py | 11 +- tests/system/utils/populate_datastore.py | 18 +- .../test_datastore_admin.py | 305 ++++++++++++---- .../unit/gapic/datastore_v1/test_datastore.py | 330 +++++++++++++----- tests/unit/test__gapic.py | 4 +- tests/unit/test__http.py | 56 ++- tests/unit/test_client.py | 20 +- tests/unit/test_helpers.py | 38 +- tests/unit/test_key.py | 19 +- tests/unit/test_transaction.py | 4 +- 34 files changed, 1657 insertions(+), 472 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 44c78f7c..87dd0061 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3 + digest: sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe diff --git a/docs/conf.py b/docs/conf.py index d51558be..febe857a 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -361,7 +361,10 @@ intersphinx_mapping = { "python": ("https://python.readthedocs.org/en/latest/", None), "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), diff --git a/google/cloud/datastore/_http.py b/google/cloud/datastore/_http.py index f92c76f0..60b8af89 100644 --- a/google/cloud/datastore/_http.py +++ b/google/cloud/datastore/_http.py @@ -53,7 +53,14 @@ def _make_request_pb(request, request_pb_type): def _request( - http, project, method, data, base_url, client_info, retry=None, timeout=None, + http, + project, + method, + data, + base_url, + client_info, + retry=None, + timeout=None, ): """Make a request over the Http transport to the Cloud Datastore API. 
@@ -103,7 +110,11 @@ def _request( if timeout is not None: response = requester( - url=api_url, method="POST", headers=headers, data=data, timeout=timeout, + url=api_url, + method="POST", + headers=headers, + data=data, + timeout=timeout, ) else: response = requester(url=api_url, method="POST", headers=headers, data=data) diff --git a/google/cloud/datastore/helpers.py b/google/cloud/datastore/helpers.py index 85dfc240..f976070e 100644 --- a/google/cloud/datastore/helpers.py +++ b/google/cloud/datastore/helpers.py @@ -418,7 +418,10 @@ def _get_value_from_value_pb(pb): ] elif value_type == "geo_point_value": - result = GeoPoint(pb.geo_point_value.latitude, pb.geo_point_value.longitude,) + result = GeoPoint( + pb.geo_point_value.latitude, + pb.geo_point_value.longitude, + ) elif value_type == "null_value": result = None diff --git a/google/cloud/datastore/key.py b/google/cloud/datastore/key.py index 76f18455..1a8e3645 100644 --- a/google/cloud/datastore/key.py +++ b/google/cloud/datastore/key.py @@ -361,7 +361,7 @@ def from_legacy_urlsafe(cls, urlsafe): reference.ParseFromString(raw_bytes) project = _clean_app(reference.app) - namespace = _get_empty(reference.name_space, u"") + namespace = _get_empty(reference.name_space, "") _check_database_id(reference.database_id) flat_path = _get_flat_path(reference.path) return cls(*flat_path, project=project, namespace=namespace) @@ -554,7 +554,7 @@ def _check_database_id(database_id): :raises: :exc:`ValueError` if the ``database_id`` is not empty. """ - if database_id != u"": + if database_id != "": msg = _DATABASE_ID_TEMPLATE.format(database_id) raise ValueError(msg) @@ -580,13 +580,13 @@ def _add_id_or_name(flat_path, element_pb, empty_allowed): # NOTE: Below 0 and the empty string are the "null" values for their # respective types, indicating that the value is unset. if id_ == 0: - if name == u"": + if name == "": if not empty_allowed: raise ValueError(_EMPTY_ELEMENT) else: flat_path.append(name) else: - if name == u"": + if name == "": flat_path.append(id_) else: msg = _BAD_ELEMENT_TEMPLATE.format(id_, name) diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index ebac62bd..c6cd885c 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -411,7 +411,12 @@ def sample_export_entities(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -574,7 +579,12 @@ def sample_import_entities(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -665,7 +675,12 @@ def sample_create_index(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -755,7 +770,12 @@ def sample_delete_index(): ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -832,7 +852,12 @@ def sample_get_index(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -911,12 +936,20 @@ def sample_list_indexes(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListIndexesAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index 4f4f9211..74bf49c4 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -59,7 +59,10 @@ class DatastoreAdminClientMeta(type): _transport_registry["grpc"] = DatastoreAdminGrpcTransport _transport_registry["grpc_asyncio"] = DatastoreAdminGrpcAsyncIOTransport - def get_transport_class(cls, label: str = None,) -> Type[DatastoreAdminTransport]: + def get_transport_class( + cls, + label: str = None, + ) -> Type[DatastoreAdminTransport]: """Returns an appropriate transport class. 
Args: @@ -219,7 +222,9 @@ def transport(self) -> DatastoreAdminTransport: return self._transport @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -232,9 +237,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -243,9 +252,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -254,9 +267,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return "projects/{project}".format( + project=project, + ) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -265,10 +282,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -590,7 +611,12 @@ def sample_export_entities(): rpc = self._transport._wrapped_methods[self._transport.export_entities] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation.from_gapic( @@ -752,7 +778,12 @@ def sample_import_entities(): rpc = self._transport._wrapped_methods[self._transport.import_entities] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation.from_gapic( @@ -844,7 +875,12 @@ def sample_create_index(): rpc = self._transport._wrapped_methods[self._transport.create_index] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. 
response = operation.from_gapic( @@ -935,7 +971,12 @@ def sample_delete_index(): rpc = self._transport._wrapped_methods[self._transport.delete_index] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation.from_gapic( @@ -1003,7 +1044,12 @@ def sample_get_index(): rpc = self._transport._wrapped_methods[self._transport.get_index] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1073,12 +1119,20 @@ def sample_list_indexes(): rpc = self._transport._wrapped_methods[self._transport.list_indexes] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListIndexesPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index 1b47ae2b..daa2096f 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -127,16 +127,24 @@ def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { self.export_entities: gapic_v1.method.wrap_method( - self.export_entities, default_timeout=60.0, client_info=client_info, + self.export_entities, + default_timeout=60.0, + client_info=client_info, ), self.import_entities: gapic_v1.method.wrap_method( - self.import_entities, default_timeout=60.0, client_info=client_info, + self.import_entities, + default_timeout=60.0, + client_info=client_info, ), self.create_index: gapic_v1.method.wrap_method( - self.create_index, default_timeout=60.0, client_info=client_info, + self.create_index, + default_timeout=60.0, + client_info=client_info, ), self.delete_index: gapic_v1.method.wrap_method( - self.delete_index, default_timeout=60.0, client_info=client_info, + self.delete_index, + default_timeout=60.0, + client_info=client_info, ), self.get_index: gapic_v1.method.wrap_method( self.get_index, @@ -173,9 +181,9 @@ def _prep_wrapped_messages(self, client_info): def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index e27734f8..ba43c4b6 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -285,8 +285,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/google/cloud/datastore_admin_v1/types/datastore_admin.py b/google/cloud/datastore_admin_v1/types/datastore_admin.py index 4e5ad0da..a490fd93 100644 --- a/google/cloud/datastore_admin_v1/types/datastore_admin.py +++ b/google/cloud/datastore_admin_v1/types/datastore_admin.py @@ -83,11 +83,31 @@ class State(proto.Enum): FAILED = 6 CANCELLED = 7 - start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) - operation_type = proto.Field(proto.ENUM, number=3, enum="OperationType",) - labels = proto.MapField(proto.STRING, proto.STRING, number=4,) - state = proto.Field(proto.ENUM, number=5, enum=State,) + start_time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + operation_type = proto.Field( + proto.ENUM, + number=3, + enum="OperationType", + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + state = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) class Progress(proto.Message): @@ -103,8 +123,14 @@ class Progress(proto.Message): unavailable. """ - work_completed = proto.Field(proto.INT64, number=1,) - work_estimated = proto.Field(proto.INT64, number=2,) + work_completed = proto.Field( + proto.INT64, + number=1, + ) + work_estimated = proto.Field( + proto.INT64, + number=2, + ) class ExportEntitiesRequest(proto.Message): @@ -145,10 +171,24 @@ class ExportEntitiesRequest(proto.Message): without conflict. """ - project_id = proto.Field(proto.STRING, number=1,) - labels = proto.MapField(proto.STRING, proto.STRING, number=2,) - entity_filter = proto.Field(proto.MESSAGE, number=3, message="EntityFilter",) - output_url_prefix = proto.Field(proto.STRING, number=4,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + entity_filter = proto.Field( + proto.MESSAGE, + number=3, + message="EntityFilter", + ) + output_url_prefix = proto.Field( + proto.STRING, + number=4, + ) class ImportEntitiesRequest(proto.Message): @@ -185,10 +225,24 @@ class ImportEntitiesRequest(proto.Message): specified then all entities from the export are imported. 
""" - project_id = proto.Field(proto.STRING, number=1,) - labels = proto.MapField(proto.STRING, proto.STRING, number=2,) - input_url = proto.Field(proto.STRING, number=3,) - entity_filter = proto.Field(proto.MESSAGE, number=4, message="EntityFilter",) + project_id = proto.Field( + proto.STRING, + number=1, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + input_url = proto.Field( + proto.STRING, + number=3, + ) + entity_filter = proto.Field( + proto.MESSAGE, + number=4, + message="EntityFilter", + ) class ExportEntitiesResponse(proto.Message): @@ -204,7 +258,10 @@ class ExportEntitiesResponse(proto.Message): Only present if the operation completed successfully. """ - output_url = proto.Field(proto.STRING, number=1,) + output_url = proto.Field( + proto.STRING, + number=1, + ) class ExportEntitiesMetadata(proto.Message): @@ -230,11 +287,30 @@ class ExportEntitiesMetadata(proto.Message): [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url]. """ - common = proto.Field(proto.MESSAGE, number=1, message="CommonMetadata",) - progress_entities = proto.Field(proto.MESSAGE, number=2, message="Progress",) - progress_bytes = proto.Field(proto.MESSAGE, number=3, message="Progress",) - entity_filter = proto.Field(proto.MESSAGE, number=4, message="EntityFilter",) - output_url_prefix = proto.Field(proto.STRING, number=5,) + common = proto.Field( + proto.MESSAGE, + number=1, + message="CommonMetadata", + ) + progress_entities = proto.Field( + proto.MESSAGE, + number=2, + message="Progress", + ) + progress_bytes = proto.Field( + proto.MESSAGE, + number=3, + message="Progress", + ) + entity_filter = proto.Field( + proto.MESSAGE, + number=4, + message="EntityFilter", + ) + output_url_prefix = proto.Field( + proto.STRING, + number=5, + ) class ImportEntitiesMetadata(proto.Message): @@ -259,11 +335,30 @@ class ImportEntitiesMetadata(proto.Message): field. """ - common = proto.Field(proto.MESSAGE, number=1, message="CommonMetadata",) - progress_entities = proto.Field(proto.MESSAGE, number=2, message="Progress",) - progress_bytes = proto.Field(proto.MESSAGE, number=3, message="Progress",) - entity_filter = proto.Field(proto.MESSAGE, number=4, message="EntityFilter",) - input_url = proto.Field(proto.STRING, number=5,) + common = proto.Field( + proto.MESSAGE, + number=1, + message="CommonMetadata", + ) + progress_entities = proto.Field( + proto.MESSAGE, + number=2, + message="Progress", + ) + progress_bytes = proto.Field( + proto.MESSAGE, + number=3, + message="Progress", + ) + entity_filter = proto.Field( + proto.MESSAGE, + number=4, + message="EntityFilter", + ) + input_url = proto.Field( + proto.STRING, + number=5, + ) class EntityFilter(proto.Message): @@ -298,8 +393,14 @@ class EntityFilter(proto.Message): Each namespace in this list must be unique. """ - kinds = proto.RepeatedField(proto.STRING, number=1,) - namespace_ids = proto.RepeatedField(proto.STRING, number=2,) + kinds = proto.RepeatedField( + proto.STRING, + number=1, + ) + namespace_ids = proto.RepeatedField( + proto.STRING, + number=2, + ) class CreateIndexRequest(proto.Message): @@ -316,8 +417,15 @@ class CreateIndexRequest(proto.Message): deleted. 
""" - project_id = proto.Field(proto.STRING, number=1,) - index = proto.Field(proto.MESSAGE, number=3, message=gda_index.Index,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + index = proto.Field( + proto.MESSAGE, + number=3, + message=gda_index.Index, + ) class DeleteIndexRequest(proto.Message): @@ -331,8 +439,14 @@ class DeleteIndexRequest(proto.Message): The resource ID of the index to delete. """ - project_id = proto.Field(proto.STRING, number=1,) - index_id = proto.Field(proto.STRING, number=3,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + index_id = proto.Field( + proto.STRING, + number=3, + ) class GetIndexRequest(proto.Message): @@ -346,8 +460,14 @@ class GetIndexRequest(proto.Message): The resource ID of the index to get. """ - project_id = proto.Field(proto.STRING, number=1,) - index_id = proto.Field(proto.STRING, number=3,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + index_id = proto.Field( + proto.STRING, + number=3, + ) class ListIndexesRequest(proto.Message): @@ -367,10 +487,22 @@ class ListIndexesRequest(proto.Message): request, if any. """ - project_id = proto.Field(proto.STRING, number=1,) - filter = proto.Field(proto.STRING, number=3,) - page_size = proto.Field(proto.INT32, number=4,) - page_token = proto.Field(proto.STRING, number=5,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + filter = proto.Field( + proto.STRING, + number=3, + ) + page_size = proto.Field( + proto.INT32, + number=4, + ) + page_token = proto.Field( + proto.STRING, + number=5, + ) class ListIndexesResponse(proto.Message): @@ -388,8 +520,15 @@ class ListIndexesResponse(proto.Message): def raw_page(self): return self - indexes = proto.RepeatedField(proto.MESSAGE, number=1, message=gda_index.Index,) - next_page_token = proto.Field(proto.STRING, number=2,) + indexes = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gda_index.Index, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class IndexOperationMetadata(proto.Message): @@ -407,9 +546,20 @@ class IndexOperationMetadata(proto.Message): acting on. """ - common = proto.Field(proto.MESSAGE, number=1, message="CommonMetadata",) - progress_entities = proto.Field(proto.MESSAGE, number=2, message="Progress",) - index_id = proto.Field(proto.STRING, number=3,) + common = proto.Field( + proto.MESSAGE, + number=1, + message="CommonMetadata", + ) + progress_entities = proto.Field( + proto.MESSAGE, + number=2, + message="Progress", + ) + index_id = proto.Field( + proto.STRING, + number=3, + ) class DatastoreFirestoreMigrationMetadata(proto.Message): @@ -431,8 +581,16 @@ class DatastoreFirestoreMigrationMetadata(proto.Message): Datastore to Cloud Firestore in Datastore mode. 
""" - migration_state = proto.Field(proto.ENUM, number=1, enum=migration.MigrationState,) - migration_step = proto.Field(proto.ENUM, number=2, enum=migration.MigrationStep,) + migration_state = proto.Field( + proto.ENUM, + number=1, + enum=migration.MigrationState, + ) + migration_step = proto.Field( + proto.ENUM, + number=2, + enum=migration.MigrationStep, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datastore_admin_v1/types/index.py b/google/cloud/datastore_admin_v1/types/index.py index 8d50f03a..e00c3bb7 100644 --- a/google/cloud/datastore_admin_v1/types/index.py +++ b/google/cloud/datastore_admin_v1/types/index.py @@ -16,7 +16,12 @@ import proto # type: ignore -__protobuf__ = proto.module(package="google.datastore.admin.v1", manifest={"Index",},) +__protobuf__ = proto.module( + package="google.datastore.admin.v1", + manifest={ + "Index", + }, +) class Index(proto.Message): @@ -73,15 +78,43 @@ class IndexedProperty(proto.Message): DIRECTION_UNSPECIFIED. """ - name = proto.Field(proto.STRING, number=1,) - direction = proto.Field(proto.ENUM, number=2, enum="Index.Direction",) + name = proto.Field( + proto.STRING, + number=1, + ) + direction = proto.Field( + proto.ENUM, + number=2, + enum="Index.Direction", + ) - project_id = proto.Field(proto.STRING, number=1,) - index_id = proto.Field(proto.STRING, number=3,) - kind = proto.Field(proto.STRING, number=4,) - ancestor = proto.Field(proto.ENUM, number=5, enum=AncestorMode,) - properties = proto.RepeatedField(proto.MESSAGE, number=6, message=IndexedProperty,) - state = proto.Field(proto.ENUM, number=7, enum=State,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + index_id = proto.Field( + proto.STRING, + number=3, + ) + kind = proto.Field( + proto.STRING, + number=4, + ) + ancestor = proto.Field( + proto.ENUM, + number=5, + enum=AncestorMode, + ) + properties = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=IndexedProperty, + ) + state = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datastore_admin_v1/types/migration.py b/google/cloud/datastore_admin_v1/types/migration.py index 18cdd8d6..97d4145f 100644 --- a/google/cloud/datastore_admin_v1/types/migration.py +++ b/google/cloud/datastore_admin_v1/types/migration.py @@ -57,7 +57,11 @@ class MigrationStateEvent(proto.Message): The new state of the migration. 
""" - state = proto.Field(proto.ENUM, number=1, enum="MigrationState",) + state = proto.Field( + proto.ENUM, + number=1, + enum="MigrationState", + ) class MigrationProgressEvent(proto.Message): @@ -105,7 +109,9 @@ class PrepareStepDetails(proto.Message): """ concurrency_mode = proto.Field( - proto.ENUM, number=1, enum="MigrationProgressEvent.ConcurrencyMode", + proto.ENUM, + number=1, + enum="MigrationProgressEvent.ConcurrencyMode", ) class RedirectWritesStepDetails(proto.Message): @@ -117,12 +123,21 @@ class RedirectWritesStepDetails(proto.Message): """ concurrency_mode = proto.Field( - proto.ENUM, number=1, enum="MigrationProgressEvent.ConcurrencyMode", + proto.ENUM, + number=1, + enum="MigrationProgressEvent.ConcurrencyMode", ) - step = proto.Field(proto.ENUM, number=1, enum="MigrationStep",) + step = proto.Field( + proto.ENUM, + number=1, + enum="MigrationStep", + ) prepare_step_details = proto.Field( - proto.MESSAGE, number=2, oneof="step_details", message=PrepareStepDetails, + proto.MESSAGE, + number=2, + oneof="step_details", + message=PrepareStepDetails, ) redirect_writes_step_details = proto.Field( proto.MESSAGE, diff --git a/google/cloud/datastore_v1/services/datastore/async_client.py b/google/cloud/datastore_v1/services/datastore/async_client.py index c6f8431b..247a388c 100644 --- a/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/google/cloud/datastore_v1/services/datastore/async_client.py @@ -309,7 +309,12 @@ def sample_lookup(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -381,7 +386,12 @@ def sample_run_query(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -465,7 +475,12 @@ def sample_begin_transaction(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -596,7 +611,12 @@ def sample_commit(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -692,7 +712,12 @@ def sample_rollback(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -790,7 +815,12 @@ def sample_allocate_ids(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -897,7 +927,12 @@ def sample_reserve_ids(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -911,7 +946,9 @@ async def __aexit__(self, exc_type, exc, tb): try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-datastore",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-datastore", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/google/cloud/datastore_v1/services/datastore/client.py b/google/cloud/datastore_v1/services/datastore/client.py index 49c741de..de663367 100644 --- a/google/cloud/datastore_v1/services/datastore/client.py +++ b/google/cloud/datastore_v1/services/datastore/client.py @@ -54,7 +54,10 @@ class DatastoreClientMeta(type): _transport_registry["grpc"] = DatastoreGrpcTransport _transport_registry["grpc_asyncio"] = DatastoreGrpcAsyncIOTransport - def get_transport_class(cls, label: str = None,) -> Type[DatastoreTransport]: + def get_transport_class( + cls, + label: str = None, + ) -> Type[DatastoreTransport]: """Returns an appropriate transport class. Args: @@ -166,7 +169,9 @@ def transport(self) -> DatastoreTransport: return self._transport @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -179,9 +184,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -190,9 +199,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -201,9 +214,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return "projects/{project}".format( + project=project, + ) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -212,10 +229,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -486,7 +507,12 @@ def sample_lookup(): rpc = self._transport._wrapped_methods[self._transport.lookup] # Send the request. 
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -549,7 +575,12 @@ def sample_run_query(): rpc = self._transport._wrapped_methods[self._transport.run_query] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -633,7 +664,12 @@ def sample_begin_transaction(): rpc = self._transport._wrapped_methods[self._transport.begin_transaction] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -764,7 +800,12 @@ def sample_commit(): rpc = self._transport._wrapped_methods[self._transport.commit] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -860,7 +901,12 @@ def sample_rollback(): rpc = self._transport._wrapped_methods[self._transport.rollback] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -958,7 +1004,12 @@ def sample_allocate_ids(): rpc = self._transport._wrapped_methods[self._transport.allocate_ids] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1055,7 +1106,12 @@ def sample_reserve_ids(): rpc = self._transport._wrapped_methods[self._transport.reserve_ids] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -1076,7 +1132,9 @@ def __exit__(self, type, value, traceback): try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-datastore",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-datastore", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/google/cloud/datastore_v1/services/datastore/transports/base.py b/google/cloud/datastore_v1/services/datastore/transports/base.py index 487a1a45..b50c0fca 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/base.py +++ b/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -29,7 +29,9 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-datastore",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-datastore", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() @@ -152,16 +154,24 @@ def _prep_wrapped_messages(self, client_info): client_info=client_info, ), self.begin_transaction: gapic_v1.method.wrap_method( - self.begin_transaction, default_timeout=60.0, client_info=client_info, + self.begin_transaction, + default_timeout=60.0, + client_info=client_info, ), self.commit: gapic_v1.method.wrap_method( - self.commit, default_timeout=60.0, client_info=client_info, + self.commit, + default_timeout=60.0, + client_info=client_info, ), self.rollback: gapic_v1.method.wrap_method( - self.rollback, default_timeout=60.0, client_info=client_info, + self.rollback, + default_timeout=60.0, + client_info=client_info, ), self.allocate_ids: gapic_v1.method.wrap_method( - self.allocate_ids, default_timeout=60.0, client_info=client_info, + self.allocate_ids, + default_timeout=60.0, + client_info=client_info, ), self.reserve_ids: gapic_v1.method.wrap_method( self.reserve_ids, @@ -183,9 +193,9 @@ def _prep_wrapped_messages(self, client_info): def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! """ raise NotImplementedError() diff --git a/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/google/cloud/datastore_v1/services/datastore/transports/grpc.py index 410aa89d..d8a2f001 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ b/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -230,8 +230,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/google/cloud/datastore_v1/types/datastore.py b/google/cloud/datastore_v1/types/datastore.py index e77ad1e9..d5d974c2 100644 --- a/google/cloud/datastore_v1/types/datastore.py +++ b/google/cloud/datastore_v1/types/datastore.py @@ -58,9 +58,20 @@ class LookupRequest(proto.Message): Required. Keys of entities to look up. 
""" - project_id = proto.Field(proto.STRING, number=8,) - read_options = proto.Field(proto.MESSAGE, number=1, message="ReadOptions",) - keys = proto.RepeatedField(proto.MESSAGE, number=3, message=entity.Key,) + project_id = proto.Field( + proto.STRING, + number=8, + ) + read_options = proto.Field( + proto.MESSAGE, + number=1, + message="ReadOptions", + ) + keys = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=entity.Key, + ) class LookupResponse(proto.Message): @@ -83,11 +94,21 @@ class LookupResponse(proto.Message): the order of the keys in the input. """ - found = proto.RepeatedField(proto.MESSAGE, number=1, message=gd_query.EntityResult,) + found = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gd_query.EntityResult, + ) missing = proto.RepeatedField( - proto.MESSAGE, number=2, message=gd_query.EntityResult, + proto.MESSAGE, + number=2, + message=gd_query.EntityResult, + ) + deferred = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=entity.Key, ) - deferred = proto.RepeatedField(proto.MESSAGE, number=3, message=entity.Key,) class RunQueryRequest(proto.Message): @@ -123,14 +144,31 @@ class RunQueryRequest(proto.Message): This field is a member of `oneof`_ ``query_type``. """ - project_id = proto.Field(proto.STRING, number=8,) - partition_id = proto.Field(proto.MESSAGE, number=2, message=entity.PartitionId,) - read_options = proto.Field(proto.MESSAGE, number=1, message="ReadOptions",) + project_id = proto.Field( + proto.STRING, + number=8, + ) + partition_id = proto.Field( + proto.MESSAGE, + number=2, + message=entity.PartitionId, + ) + read_options = proto.Field( + proto.MESSAGE, + number=1, + message="ReadOptions", + ) query = proto.Field( - proto.MESSAGE, number=3, oneof="query_type", message=gd_query.Query, + proto.MESSAGE, + number=3, + oneof="query_type", + message=gd_query.Query, ) gql_query = proto.Field( - proto.MESSAGE, number=7, oneof="query_type", message=gd_query.GqlQuery, + proto.MESSAGE, + number=7, + oneof="query_type", + message=gd_query.GqlQuery, ) @@ -146,8 +184,16 @@ class RunQueryResponse(proto.Message): was set. """ - batch = proto.Field(proto.MESSAGE, number=1, message=gd_query.QueryResultBatch,) - query = proto.Field(proto.MESSAGE, number=2, message=gd_query.Query,) + batch = proto.Field( + proto.MESSAGE, + number=1, + message=gd_query.QueryResultBatch, + ) + query = proto.Field( + proto.MESSAGE, + number=2, + message=gd_query.Query, + ) class BeginTransactionRequest(proto.Message): @@ -162,9 +208,14 @@ class BeginTransactionRequest(proto.Message): Options for a new transaction. """ - project_id = proto.Field(proto.STRING, number=8,) + project_id = proto.Field( + proto.STRING, + number=8, + ) transaction_options = proto.Field( - proto.MESSAGE, number=10, message="TransactionOptions", + proto.MESSAGE, + number=10, + message="TransactionOptions", ) @@ -177,7 +228,10 @@ class BeginTransactionResponse(proto.Message): The transaction identifier (always present). """ - transaction = proto.Field(proto.BYTES, number=1,) + transaction = proto.Field( + proto.BYTES, + number=1, + ) class RollbackRequest(proto.Message): @@ -193,8 +247,14 @@ class RollbackRequest(proto.Message): [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. 
""" - project_id = proto.Field(proto.STRING, number=8,) - transaction = proto.Field(proto.BYTES, number=1,) + project_id = proto.Field( + proto.STRING, + number=8, + ) + transaction = proto.Field( + proto.BYTES, + number=1, + ) class RollbackResponse(proto.Message): @@ -248,10 +308,25 @@ class Mode(proto.Enum): TRANSACTIONAL = 1 NON_TRANSACTIONAL = 2 - project_id = proto.Field(proto.STRING, number=8,) - mode = proto.Field(proto.ENUM, number=5, enum=Mode,) - transaction = proto.Field(proto.BYTES, number=1, oneof="transaction_selector",) - mutations = proto.RepeatedField(proto.MESSAGE, number=6, message="Mutation",) + project_id = proto.Field( + proto.STRING, + number=8, + ) + mode = proto.Field( + proto.ENUM, + number=5, + enum=Mode, + ) + transaction = proto.Field( + proto.BYTES, + number=1, + oneof="transaction_selector", + ) + mutations = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="Mutation", + ) class CommitResponse(proto.Message): @@ -269,9 +344,14 @@ class CommitResponse(proto.Message): """ mutation_results = proto.RepeatedField( - proto.MESSAGE, number=3, message="MutationResult", + proto.MESSAGE, + number=3, + message="MutationResult", + ) + index_updates = proto.Field( + proto.INT32, + number=4, ) - index_updates = proto.Field(proto.INT32, number=4,) class AllocateIdsRequest(proto.Message): @@ -288,8 +368,15 @@ class AllocateIdsRequest(proto.Message): reserved/read-only. """ - project_id = proto.Field(proto.STRING, number=8,) - keys = proto.RepeatedField(proto.MESSAGE, number=1, message=entity.Key,) + project_id = proto.Field( + proto.STRING, + number=8, + ) + keys = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=entity.Key, + ) class AllocateIdsResponse(proto.Message): @@ -303,7 +390,11 @@ class AllocateIdsResponse(proto.Message): with a newly allocated ID. """ - keys = proto.RepeatedField(proto.MESSAGE, number=1, message=entity.Key,) + keys = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=entity.Key, + ) class ReserveIdsRequest(proto.Message): @@ -323,9 +414,19 @@ class ReserveIdsRequest(proto.Message): auto-allocated. """ - project_id = proto.Field(proto.STRING, number=8,) - database_id = proto.Field(proto.STRING, number=9,) - keys = proto.RepeatedField(proto.MESSAGE, number=1, message=entity.Key,) + project_id = proto.Field( + proto.STRING, + number=8, + ) + database_id = proto.Field( + proto.STRING, + number=9, + ) + keys = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=entity.Key, + ) class ReserveIdsResponse(proto.Message): @@ -380,19 +481,33 @@ class Mutation(proto.Message): """ insert = proto.Field( - proto.MESSAGE, number=4, oneof="operation", message=entity.Entity, + proto.MESSAGE, + number=4, + oneof="operation", + message=entity.Entity, ) update = proto.Field( - proto.MESSAGE, number=5, oneof="operation", message=entity.Entity, + proto.MESSAGE, + number=5, + oneof="operation", + message=entity.Entity, ) upsert = proto.Field( - proto.MESSAGE, number=6, oneof="operation", message=entity.Entity, + proto.MESSAGE, + number=6, + oneof="operation", + message=entity.Entity, ) delete = proto.Field( - proto.MESSAGE, number=7, oneof="operation", message=entity.Key, + proto.MESSAGE, + number=7, + oneof="operation", + message=entity.Key, ) base_version = proto.Field( - proto.INT64, number=8, oneof="conflict_detection_strategy", + proto.INT64, + number=8, + oneof="conflict_detection_strategy", ) @@ -418,9 +533,19 @@ class MutationResult(proto.Message): strategy field is not set in the mutation. 
""" - key = proto.Field(proto.MESSAGE, number=3, message=entity.Key,) - version = proto.Field(proto.INT64, number=4,) - conflict_detected = proto.Field(proto.BOOL, number=5,) + key = proto.Field( + proto.MESSAGE, + number=3, + message=entity.Key, + ) + version = proto.Field( + proto.INT64, + number=4, + ) + conflict_detected = proto.Field( + proto.BOOL, + number=5, + ) class ReadOptions(proto.Message): @@ -454,9 +579,16 @@ class ReadConsistency(proto.Enum): EVENTUAL = 2 read_consistency = proto.Field( - proto.ENUM, number=1, oneof="consistency_type", enum=ReadConsistency, + proto.ENUM, + number=1, + oneof="consistency_type", + enum=ReadConsistency, + ) + transaction = proto.Field( + proto.BYTES, + number=2, + oneof="consistency_type", ) - transaction = proto.Field(proto.BYTES, number=2, oneof="consistency_type",) class TransactionOptions(proto.Message): @@ -496,14 +628,26 @@ class ReadWrite(proto.Message): being retried. """ - previous_transaction = proto.Field(proto.BYTES, number=1,) + previous_transaction = proto.Field( + proto.BYTES, + number=1, + ) class ReadOnly(proto.Message): - r"""Options specific to read-only transactions. - """ + r"""Options specific to read-only transactions.""" - read_write = proto.Field(proto.MESSAGE, number=1, oneof="mode", message=ReadWrite,) - read_only = proto.Field(proto.MESSAGE, number=2, oneof="mode", message=ReadOnly,) + read_write = proto.Field( + proto.MESSAGE, + number=1, + oneof="mode", + message=ReadWrite, + ) + read_only = proto.Field( + proto.MESSAGE, + number=2, + oneof="mode", + message=ReadOnly, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datastore_v1/types/entity.py b/google/cloud/datastore_v1/types/entity.py index 62daa0e7..a9371a98 100644 --- a/google/cloud/datastore_v1/types/entity.py +++ b/google/cloud/datastore_v1/types/entity.py @@ -22,7 +22,13 @@ __protobuf__ = proto.module( package="google.datastore.v1", - manifest={"PartitionId", "Key", "ArrayValue", "Value", "Entity",}, + manifest={ + "PartitionId", + "Key", + "ArrayValue", + "Value", + "Entity", + }, ) @@ -56,8 +62,14 @@ class PartitionId(proto.Message): which the entities belong. """ - project_id = proto.Field(proto.STRING, number=2,) - namespace_id = proto.Field(proto.STRING, number=4,) + project_id = proto.Field( + proto.STRING, + number=2, + ) + namespace_id = proto.Field( + proto.STRING, + number=4, + ) class Key(proto.Message): @@ -127,12 +139,31 @@ class PathElement(proto.Message): This field is a member of `oneof`_ ``id_type``. """ - kind = proto.Field(proto.STRING, number=1,) - id = proto.Field(proto.INT64, number=2, oneof="id_type",) - name = proto.Field(proto.STRING, number=3, oneof="id_type",) - - partition_id = proto.Field(proto.MESSAGE, number=1, message="PartitionId",) - path = proto.RepeatedField(proto.MESSAGE, number=2, message=PathElement,) + kind = proto.Field( + proto.STRING, + number=1, + ) + id = proto.Field( + proto.INT64, + number=2, + oneof="id_type", + ) + name = proto.Field( + proto.STRING, + number=3, + oneof="id_type", + ) + + partition_id = proto.Field( + proto.MESSAGE, + number=1, + message="PartitionId", + ) + path = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=PathElement, + ) class ArrayValue(proto.Message): @@ -145,7 +176,11 @@ class ArrayValue(proto.Message): 'exclude_from_indexes'. 
""" - values = proto.RepeatedField(proto.MESSAGE, number=1, message="Value",) + values = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Value", + ) class Value(proto.Message): @@ -226,28 +261,74 @@ class Value(proto.Message): """ null_value = proto.Field( - proto.ENUM, number=11, oneof="value_type", enum=struct_pb2.NullValue, + proto.ENUM, + number=11, + oneof="value_type", + enum=struct_pb2.NullValue, + ) + boolean_value = proto.Field( + proto.BOOL, + number=1, + oneof="value_type", + ) + integer_value = proto.Field( + proto.INT64, + number=2, + oneof="value_type", + ) + double_value = proto.Field( + proto.DOUBLE, + number=3, + oneof="value_type", ) - boolean_value = proto.Field(proto.BOOL, number=1, oneof="value_type",) - integer_value = proto.Field(proto.INT64, number=2, oneof="value_type",) - double_value = proto.Field(proto.DOUBLE, number=3, oneof="value_type",) timestamp_value = proto.Field( - proto.MESSAGE, number=10, oneof="value_type", message=timestamp_pb2.Timestamp, + proto.MESSAGE, + number=10, + oneof="value_type", + message=timestamp_pb2.Timestamp, + ) + key_value = proto.Field( + proto.MESSAGE, + number=5, + oneof="value_type", + message="Key", + ) + string_value = proto.Field( + proto.STRING, + number=17, + oneof="value_type", + ) + blob_value = proto.Field( + proto.BYTES, + number=18, + oneof="value_type", ) - key_value = proto.Field(proto.MESSAGE, number=5, oneof="value_type", message="Key",) - string_value = proto.Field(proto.STRING, number=17, oneof="value_type",) - blob_value = proto.Field(proto.BYTES, number=18, oneof="value_type",) geo_point_value = proto.Field( - proto.MESSAGE, number=8, oneof="value_type", message=latlng_pb2.LatLng, + proto.MESSAGE, + number=8, + oneof="value_type", + message=latlng_pb2.LatLng, ) entity_value = proto.Field( - proto.MESSAGE, number=6, oneof="value_type", message="Entity", + proto.MESSAGE, + number=6, + oneof="value_type", + message="Entity", ) array_value = proto.Field( - proto.MESSAGE, number=9, oneof="value_type", message="ArrayValue", + proto.MESSAGE, + number=9, + oneof="value_type", + message="ArrayValue", + ) + meaning = proto.Field( + proto.INT32, + number=14, + ) + exclude_from_indexes = proto.Field( + proto.BOOL, + number=19, ) - meaning = proto.Field(proto.INT32, number=14,) - exclude_from_indexes = proto.Field(proto.BOOL, number=19,) class Entity(proto.Message): @@ -273,8 +354,17 @@ class Entity(proto.Message): characters. The name cannot be ``""``. 
""" - key = proto.Field(proto.MESSAGE, number=1, message="Key",) - properties = proto.MapField(proto.STRING, proto.MESSAGE, number=3, message="Value",) + key = proto.Field( + proto.MESSAGE, + number=1, + message="Key", + ) + properties = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=3, + message="Value", + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datastore_v1/types/query.py b/google/cloud/datastore_v1/types/query.py index 920d39b7..7c368c57 100644 --- a/google/cloud/datastore_v1/types/query.py +++ b/google/cloud/datastore_v1/types/query.py @@ -75,9 +75,19 @@ class ResultType(proto.Enum): PROJECTION = 2 KEY_ONLY = 3 - entity = proto.Field(proto.MESSAGE, number=1, message=gd_entity.Entity,) - version = proto.Field(proto.INT64, number=4,) - cursor = proto.Field(proto.BYTES, number=3,) + entity = proto.Field( + proto.MESSAGE, + number=1, + message=gd_entity.Entity, + ) + version = proto.Field( + proto.INT64, + number=4, + ) + cursor = proto.Field( + proto.BYTES, + number=3, + ) class Query(proto.Message): @@ -122,17 +132,48 @@ class Query(proto.Message): Must be >= 0 if specified. """ - projection = proto.RepeatedField(proto.MESSAGE, number=2, message="Projection",) - kind = proto.RepeatedField(proto.MESSAGE, number=3, message="KindExpression",) - filter = proto.Field(proto.MESSAGE, number=4, message="Filter",) - order = proto.RepeatedField(proto.MESSAGE, number=5, message="PropertyOrder",) + projection = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Projection", + ) + kind = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="KindExpression", + ) + filter = proto.Field( + proto.MESSAGE, + number=4, + message="Filter", + ) + order = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="PropertyOrder", + ) distinct_on = proto.RepeatedField( - proto.MESSAGE, number=6, message="PropertyReference", + proto.MESSAGE, + number=6, + message="PropertyReference", + ) + start_cursor = proto.Field( + proto.BYTES, + number=7, + ) + end_cursor = proto.Field( + proto.BYTES, + number=8, + ) + offset = proto.Field( + proto.INT32, + number=10, + ) + limit = proto.Field( + proto.MESSAGE, + number=12, + message=wrappers_pb2.Int32Value, ) - start_cursor = proto.Field(proto.BYTES, number=7,) - end_cursor = proto.Field(proto.BYTES, number=8,) - offset = proto.Field(proto.INT32, number=10,) - limit = proto.Field(proto.MESSAGE, number=12, message=wrappers_pb2.Int32Value,) class KindExpression(proto.Message): @@ -143,7 +184,10 @@ class KindExpression(proto.Message): The name of the kind. """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class PropertyReference(proto.Message): @@ -156,7 +200,10 @@ class PropertyReference(proto.Message): a property name path. """ - name = proto.Field(proto.STRING, number=2,) + name = proto.Field( + proto.STRING, + number=2, + ) class Projection(proto.Message): @@ -167,7 +214,11 @@ class Projection(proto.Message): The property to project. 
""" - property = proto.Field(proto.MESSAGE, number=1, message="PropertyReference",) + property = proto.Field( + proto.MESSAGE, + number=1, + message="PropertyReference", + ) class PropertyOrder(proto.Message): @@ -186,8 +237,16 @@ class Direction(proto.Enum): ASCENDING = 1 DESCENDING = 2 - property = proto.Field(proto.MESSAGE, number=1, message="PropertyReference",) - direction = proto.Field(proto.ENUM, number=2, enum=Direction,) + property = proto.Field( + proto.MESSAGE, + number=1, + message="PropertyReference", + ) + direction = proto.Field( + proto.ENUM, + number=2, + enum=Direction, + ) class Filter(proto.Message): @@ -212,10 +271,16 @@ class Filter(proto.Message): """ composite_filter = proto.Field( - proto.MESSAGE, number=1, oneof="filter_type", message="CompositeFilter", + proto.MESSAGE, + number=1, + oneof="filter_type", + message="CompositeFilter", ) property_filter = proto.Field( - proto.MESSAGE, number=2, oneof="filter_type", message="PropertyFilter", + proto.MESSAGE, + number=2, + oneof="filter_type", + message="PropertyFilter", ) @@ -236,8 +301,16 @@ class Operator(proto.Enum): OPERATOR_UNSPECIFIED = 0 AND = 1 - op = proto.Field(proto.ENUM, number=1, enum=Operator,) - filters = proto.RepeatedField(proto.MESSAGE, number=2, message="Filter",) + op = proto.Field( + proto.ENUM, + number=1, + enum=Operator, + ) + filters = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Filter", + ) class PropertyFilter(proto.Message): @@ -265,9 +338,21 @@ class Operator(proto.Enum): HAS_ANCESTOR = 11 NOT_IN = 13 - property = proto.Field(proto.MESSAGE, number=1, message="PropertyReference",) - op = proto.Field(proto.ENUM, number=2, enum=Operator,) - value = proto.Field(proto.MESSAGE, number=3, message=gd_entity.Value,) + property = proto.Field( + proto.MESSAGE, + number=1, + message="PropertyReference", + ) + op = proto.Field( + proto.ENUM, + number=2, + enum=Operator, + ) + value = proto.Field( + proto.MESSAGE, + number=3, + message=gd_entity.Value, + ) class GqlQuery(proto.Message): @@ -300,13 +385,24 @@ class GqlQuery(proto.Message): true. 
""" - query_string = proto.Field(proto.STRING, number=1,) - allow_literals = proto.Field(proto.BOOL, number=2,) + query_string = proto.Field( + proto.STRING, + number=1, + ) + allow_literals = proto.Field( + proto.BOOL, + number=2, + ) named_bindings = proto.MapField( - proto.STRING, proto.MESSAGE, number=5, message="GqlQueryParameter", + proto.STRING, + proto.MESSAGE, + number=5, + message="GqlQueryParameter", ) positional_bindings = proto.RepeatedField( - proto.MESSAGE, number=4, message="GqlQueryParameter", + proto.MESSAGE, + number=4, + message="GqlQueryParameter", ) @@ -333,9 +429,16 @@ class GqlQueryParameter(proto.Message): """ value = proto.Field( - proto.MESSAGE, number=2, oneof="parameter_type", message=gd_entity.Value, + proto.MESSAGE, + number=2, + oneof="parameter_type", + message=gd_entity.Value, + ) + cursor = proto.Field( + proto.BYTES, + number=3, + oneof="parameter_type", ) - cursor = proto.Field(proto.BYTES, number=3, oneof="parameter_type",) class QueryResultBatch(proto.Message): @@ -380,17 +483,37 @@ class MoreResultsType(proto.Enum): MORE_RESULTS_AFTER_CURSOR = 4 NO_MORE_RESULTS = 3 - skipped_results = proto.Field(proto.INT32, number=6,) - skipped_cursor = proto.Field(proto.BYTES, number=3,) + skipped_results = proto.Field( + proto.INT32, + number=6, + ) + skipped_cursor = proto.Field( + proto.BYTES, + number=3, + ) entity_result_type = proto.Field( - proto.ENUM, number=1, enum="EntityResult.ResultType", + proto.ENUM, + number=1, + enum="EntityResult.ResultType", ) entity_results = proto.RepeatedField( - proto.MESSAGE, number=2, message="EntityResult", + proto.MESSAGE, + number=2, + message="EntityResult", + ) + end_cursor = proto.Field( + proto.BYTES, + number=4, + ) + more_results = proto.Field( + proto.ENUM, + number=5, + enum=MoreResultsType, + ) + snapshot_version = proto.Field( + proto.INT64, + number=7, ) - end_cursor = proto.Field(proto.BYTES, number=4,) - more_results = proto.Field(proto.ENUM, number=5, enum=MoreResultsType,) - snapshot_version = proto.Field(proto.INT64, number=7,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/noxfile.py b/noxfile.py index c8cc8070..b8b09a16 100644 --- a/noxfile.py +++ b/noxfile.py @@ -24,7 +24,7 @@ import nox -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" @@ -59,7 +59,9 @@ def lint(session): """ session.install("flake8", BLACK_VERSION) session.run( - "black", "--check", *BLACK_PATHS, + "black", + "--check", + *BLACK_PATHS, ) session.run("flake8", "google", "tests") @@ -69,7 +71,8 @@ def blacken(session): """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) session.run( - "black", *BLACK_PATHS, + "black", + *BLACK_PATHS, ) diff --git a/tests/system/conftest.py b/tests/system/conftest.py index 61f8c1f0..b0547f83 100644 --- a/tests/system/conftest.py +++ b/tests/system/conftest.py @@ -34,7 +34,9 @@ def datastore_client(test_namespace): if _helpers.EMULATOR_DATASET is not None: http = requests.Session() # Un-authorized. 
return datastore.Client( - project=_helpers.EMULATOR_DATASET, namespace=test_namespace, _http=http, + project=_helpers.EMULATOR_DATASET, + namespace=test_namespace, + _http=http, ) else: return datastore.Client(namespace=test_namespace) diff --git a/tests/system/test_allocate_reserve_ids.py b/tests/system/test_allocate_reserve_ids.py index 8c40538f..f934d067 100644 --- a/tests/system/test_allocate_reserve_ids.py +++ b/tests/system/test_allocate_reserve_ids.py @@ -18,7 +18,8 @@ def test_client_allocate_ids(datastore_client): num_ids = 10 allocated_keys = datastore_client.allocate_ids( - datastore_client.key("Kind"), num_ids, + datastore_client.key("Kind"), + num_ids, ) assert len(allocated_keys) == num_ids diff --git a/tests/system/test_put.py b/tests/system/test_put.py index 5e884cf3..2f8de3a0 100644 --- a/tests/system/test_put.py +++ b/tests/system/test_put.py @@ -29,10 +29,10 @@ def parent_key(datastore_client): def _get_post(datastore_client, id_or_name=None, post_content=None): post_content = post_content or { - "title": u"How to make the perfect pizza in your grill", - "tags": [u"pizza", u"grill"], + "title": "How to make the perfect pizza in your grill", + "tags": ["pizza", "grill"], "publishedAt": datetime.datetime(2001, 1, 1, tzinfo=UTC), - "author": u"Silvano", + "author": "Silvano", "isDraft": False, "wordCount": 400, "rating": 5.0, @@ -77,15 +77,18 @@ def test_client_put_w_multiple_in_txn(datastore_client, entities_to_delete): entities_to_delete.append(entity1) second_post_content = { - "title": u"How to make the perfect homemade pasta", - "tags": [u"pasta", u"homemade"], + "title": "How to make the perfect homemade pasta", + "tags": ["pasta", "homemade"], "publishedAt": datetime.datetime(2001, 1, 1), - "author": u"Silvano", + "author": "Silvano", "isDraft": False, "wordCount": 450, "rating": 4.5, } - entity2 = _get_post(datastore_client, post_content=second_post_content,) + entity2 = _get_post( + datastore_client, + post_content=second_post_content, + ) xact.put(entity2) # Register entity to be deleted. entities_to_delete.append(entity2) @@ -111,7 +114,7 @@ def test_client_put_w_all_value_types(datastore_client, entities_to_delete): entity["truthy"] = True entity["float"] = 2.718281828 entity["int"] = 3735928559 - entity["words"] = u"foo" + entity["words"] = "foo" entity["blob"] = b"seekretz" entity_stored = datastore.Entity(key=key_stored) entity_stored["hi"] = "bye" @@ -133,7 +136,7 @@ def test_client_put_w_entity_w_self_reference(datastore_client, entities_to_dele parent_key = datastore_client.key("Residence", "NewYork") key = datastore_client.key("Person", "name", parent=parent_key) entity = datastore.Entity(key=key) - entity["fullName"] = u"Full name" + entity["fullName"] = "Full name" entity["linkedTo"] = key # Self reference. datastore_client.put(entity) diff --git a/tests/system/test_query.py b/tests/system/test_query.py index c5921bc9..499bc507 100644 --- a/tests/system/test_query.py +++ b/tests/system/test_query.py @@ -301,7 +301,8 @@ def test_query_distinct_on(ancestor_query): @pytest.fixture(scope="session") def large_query_client(datastore_client): large_query_client = _helpers.clone_client( - datastore_client, namespace=populate_datastore.LARGE_CHARACTER_NAMESPACE, + datastore_client, + namespace=populate_datastore.LARGE_CHARACTER_NAMESPACE, ) # Populate the datastore if necessary. 
populate_datastore.add_large_character_entities(client=large_query_client) @@ -322,11 +323,23 @@ def large_query(large_query_client): "limit,offset,expected", [ # with no offset there are the correct # of results - (None, None, populate_datastore.LARGE_CHARACTER_TOTAL_OBJECTS,), + ( + None, + None, + populate_datastore.LARGE_CHARACTER_TOTAL_OBJECTS, + ), # with no limit there are results (offset provided) - (None, 900, populate_datastore.LARGE_CHARACTER_TOTAL_OBJECTS - 900,), + ( + None, + 900, + populate_datastore.LARGE_CHARACTER_TOTAL_OBJECTS - 900, + ), # Offset beyond items larger: verify 200 items found - (200, 1100, 200,), + ( + 200, + 1100, + 200, + ), # offset within range, expect 50 despite larger limit") (100, populate_datastore.LARGE_CHARACTER_TOTAL_OBJECTS - 50, 50), # Offset beyond items larger Verify no items found") diff --git a/tests/system/test_transaction.py b/tests/system/test_transaction.py index d27bc439..b380561f 100644 --- a/tests/system/test_transaction.py +++ b/tests/system/test_transaction.py @@ -23,7 +23,7 @@ def test_transaction_via_with_statement(datastore_client, entities_to_delete): key = datastore_client.key("Company", "Google") entity = datastore.Entity(key=key) - entity["url"] = u"www.google.com" + entity["url"] = "www.google.com" with datastore_client.transaction() as xact: result = datastore_client.get(entity.key) @@ -39,7 +39,8 @@ def test_transaction_via_with_statement(datastore_client, entities_to_delete): def test_transaction_via_explicit_begin_get_commit( - datastore_client, entities_to_delete, + datastore_client, + entities_to_delete, ): # See # github.com/GoogleCloudPlatform/google-cloud-python/issues/1859 @@ -87,7 +88,7 @@ def test_failure_with_contention(datastore_client, entities_to_delete): # and updated outside it with a contentious value. key = local_client.key("BreakTxn", 1234) orig_entity = datastore.Entity(key=key) - orig_entity["foo"] = u"bar" + orig_entity["foo"] = "bar" local_client.put(orig_entity) entities_to_delete.append(orig_entity) @@ -97,10 +98,10 @@ def test_failure_with_contention(datastore_client, entities_to_delete): entity_in_txn = local_client.get(key) # Update the original entity outside the transaction. - orig_entity[contention_prop_name] = u"outside" + orig_entity[contention_prop_name] = "outside" datastore_client.put(orig_entity) # Try to update the entity which we already updated outside the # transaction. 
- entity_in_txn[contention_prop_name] = u"inside" + entity_in_txn[contention_prop_name] = "inside" txn.put(entity_in_txn) diff --git a/tests/system/utils/populate_datastore.py b/tests/system/utils/populate_datastore.py index 52f453f6..47395070 100644 --- a/tests/system/utils/populate_datastore.py +++ b/tests/system/utils/populate_datastore.py @@ -40,19 +40,19 @@ EDDARD + ("Character", "Jon Snow"), ) CHARACTERS = ( - {"name": u"Rickard", "family": u"Stark", "appearances": 0, "alive": False}, - {"name": u"Eddard", "family": u"Stark", "appearances": 9, "alive": False}, + {"name": "Rickard", "family": "Stark", "appearances": 0, "alive": False}, + {"name": "Eddard", "family": "Stark", "appearances": 9, "alive": False}, { - "name": u"Catelyn", - "family": [u"Stark", u"Tully"], + "name": "Catelyn", + "family": ["Stark", "Tully"], "appearances": 26, "alive": False, }, - {"name": u"Arya", "family": u"Stark", "appearances": 33, "alive": True}, - {"name": u"Sansa", "family": u"Stark", "appearances": 31, "alive": True}, - {"name": u"Robb", "family": u"Stark", "appearances": 22, "alive": False}, - {"name": u"Bran", "family": u"Stark", "appearances": 25, "alive": True}, - {"name": u"Jon Snow", "family": u"Stark", "appearances": 32, "alive": True}, + {"name": "Arya", "family": "Stark", "appearances": 33, "alive": True}, + {"name": "Sansa", "family": "Stark", "appearances": 31, "alive": True}, + {"name": "Robb", "family": "Stark", "appearances": 22, "alive": False}, + {"name": "Bran", "family": "Stark", "appearances": 25, "alive": True}, + {"name": "Jon Snow", "family": "Stark", "appearances": 32, "alive": True}, ) LARGE_CHARACTER_TOTAL_OBJECTS = 2500 LARGE_CHARACTER_NAMESPACE = "LargeCharacterEntity" diff --git a/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index e6ed5508..7e678103 100644 --- a/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -95,7 +95,11 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [DatastoreAdminClient, DatastoreAdminAsyncClient,] + "client_class", + [ + DatastoreAdminClient, + DatastoreAdminAsyncClient, + ], ) def test_datastore_admin_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() @@ -137,7 +141,11 @@ def test_datastore_admin_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", [DatastoreAdminClient, DatastoreAdminAsyncClient,] + "client_class", + [ + DatastoreAdminClient, + DatastoreAdminAsyncClient, + ], ) def test_datastore_admin_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() @@ -501,7 +509,9 @@ def test_datastore_admin_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. 
- options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -644,10 +654,17 @@ def test_datastore_admin_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [datastore_admin.ExportEntitiesRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore_admin.ExportEntitiesRequest, + dict, + ], +) def test_export_entities(request_type, transport: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -673,7 +690,8 @@ def test_export_entities_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -689,7 +707,8 @@ async def test_export_entities_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ExportEntitiesRequest ): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -719,7 +738,9 @@ async def test_export_entities_async_from_dict(): def test_export_entities_flattened(): - client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_entities), "__call__") as call: @@ -753,7 +774,9 @@ def test_export_entities_flattened(): def test_export_entities_flattened_error(): - client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -826,10 +849,17 @@ async def test_export_entities_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [datastore_admin.ImportEntitiesRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore_admin.ImportEntitiesRequest, + dict, + ], +) def test_import_entities(request_type, transport: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -855,7 +885,8 @@ def test_import_entities_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -871,7 +902,8 @@ async def test_import_entities_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ImportEntitiesRequest ): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -901,7 +933,9 @@ async def test_import_entities_async_from_dict(): def test_import_entities_flattened(): - client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.import_entities), "__call__") as call: @@ -935,7 +969,9 @@ def test_import_entities_flattened(): def test_import_entities_flattened_error(): - client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1008,10 +1044,17 @@ async def test_import_entities_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [datastore_admin.CreateIndexRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore_admin.CreateIndexRequest, + dict, + ], +) def test_create_index(request_type, transport: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1037,7 +1080,8 @@ def test_create_index_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
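
The mechanical churn in every hunk of this patch traces back to two behaviors of black 22.3.0, the version pinned in noxfile.py above: the "magic trailing comma" rule, which explodes any call whose argument list ends in a comma onto one argument per line, and string-prefix normalization, which drops the redundant Python 2 u prefix. A minimal sketch of both, assuming black==22.3.0 is installed:

import black

# A call that ends in a trailing comma gets exploded, one argument per line:
src = 'values = proto.RepeatedField(proto.MESSAGE, number=1, message="Value",)\n'
print(black.format_str(src, mode=black.Mode()))
# values = proto.RepeatedField(
#     proto.MESSAGE,
#     number=1,
#     message="Value",
# )

# The u prefix is a no-op on Python 3, so black strips it:
print(black.format_str('author = u"Silvano"\n', mode=black.Mode()))
# author = "Silvano"
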
@@ -1053,7 +1097,8 @@ async def test_create_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.CreateIndexRequest ): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1082,10 +1127,17 @@ async def test_create_index_async_from_dict(): await test_create_index_async(request_type=dict) -@pytest.mark.parametrize("request_type", [datastore_admin.DeleteIndexRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore_admin.DeleteIndexRequest, + dict, + ], +) def test_delete_index(request_type, transport: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1111,7 +1163,8 @@ def test_delete_index_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1127,7 +1180,8 @@ async def test_delete_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.DeleteIndexRequest ): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1156,10 +1210,17 @@ async def test_delete_index_async_from_dict(): await test_delete_index_async(request_type=dict) -@pytest.mark.parametrize("request_type", [datastore_admin.GetIndexRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore_admin.GetIndexRequest, + dict, + ], +) def test_get_index(request_type, transport: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1196,7 +1257,8 @@ def test_get_index_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
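
The flattened/flattened_error pairs repeated throughout this module all pin down the same GAPIC calling convention: each RPC method accepts either a fully-formed request object or individual flattened keyword fields, and raises ValueError when given both. A hedged sketch of the convention these tests assert, with "my-project" and the bucket URL as placeholder values:

from google.cloud import datastore_admin_v1

client = datastore_admin_v1.DatastoreAdminClient()
request = datastore_admin_v1.ExportEntitiesRequest(
    project_id="my-project",
    output_url_prefix="gs://my-bucket",
)

# The request-object form and the flattened form are equivalent:
operation = client.export_entities(request=request)
operation = client.export_entities(
    project_id="my-project",
    output_url_prefix="gs://my-bucket",
)

# Mixing them is rejected, which is what the *_flattened_error tests assert:
client.export_entities(request=request, project_id="my-project")  # ValueError
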
@@ -1212,7 +1274,8 @@ async def test_get_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.GetIndexRequest ): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1252,10 +1315,17 @@ async def test_get_index_async_from_dict(): await test_get_index_async(request_type=dict) -@pytest.mark.parametrize("request_type", [datastore_admin.ListIndexesRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore_admin.ListIndexesRequest, + dict, + ], +) def test_list_indexes(request_type, transport: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1284,7 +1354,8 @@ def test_list_indexes_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1300,7 +1371,8 @@ async def test_list_indexes_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ListIndexesRequest ): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1334,7 +1406,8 @@ async def test_list_indexes_async_from_dict(): def test_list_indexes_pager(transport_name: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1342,15 +1415,28 @@ def test_list_indexes_pager(transport_name: str = "grpc"): # Set the response to a series of pages. call.side_effect = ( datastore_admin.ListIndexesResponse( - indexes=[index.Index(), index.Index(), index.Index(),], + indexes=[ + index.Index(), + index.Index(), + index.Index(), + ], next_page_token="abc", ), - datastore_admin.ListIndexesResponse(indexes=[], next_page_token="def",), datastore_admin.ListIndexesResponse( - indexes=[index.Index(),], next_page_token="ghi", + indexes=[], + next_page_token="def", + ), + datastore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + ], + next_page_token="ghi", ), datastore_admin.ListIndexesResponse( - indexes=[index.Index(), index.Index(),], + indexes=[ + index.Index(), + index.Index(), + ], ), RuntimeError, ) @@ -1367,7 +1453,8 @@ def test_list_indexes_pager(transport_name: str = "grpc"): def test_list_indexes_pages(transport_name: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1375,15 +1462,28 @@ def test_list_indexes_pages(transport_name: str = "grpc"): # Set the response to a series of pages. 
call.side_effect = ( datastore_admin.ListIndexesResponse( - indexes=[index.Index(), index.Index(), index.Index(),], + indexes=[ + index.Index(), + index.Index(), + index.Index(), + ], next_page_token="abc", ), - datastore_admin.ListIndexesResponse(indexes=[], next_page_token="def",), datastore_admin.ListIndexesResponse( - indexes=[index.Index(),], next_page_token="ghi", + indexes=[], + next_page_token="def", ), datastore_admin.ListIndexesResponse( - indexes=[index.Index(), index.Index(),], + indexes=[ + index.Index(), + ], + next_page_token="ghi", + ), + datastore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + index.Index(), + ], ), RuntimeError, ) @@ -1394,7 +1494,9 @@ def test_list_indexes_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_indexes_async_pager(): - client = DatastoreAdminAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1403,19 +1505,34 @@ async def test_list_indexes_async_pager(): # Set the response to a series of pages. call.side_effect = ( datastore_admin.ListIndexesResponse( - indexes=[index.Index(), index.Index(), index.Index(),], + indexes=[ + index.Index(), + index.Index(), + index.Index(), + ], next_page_token="abc", ), - datastore_admin.ListIndexesResponse(indexes=[], next_page_token="def",), datastore_admin.ListIndexesResponse( - indexes=[index.Index(),], next_page_token="ghi", + indexes=[], + next_page_token="def", ), datastore_admin.ListIndexesResponse( - indexes=[index.Index(), index.Index(),], + indexes=[ + index.Index(), + ], + next_page_token="ghi", + ), + datastore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + index.Index(), + ], ), RuntimeError, ) - async_pager = await client.list_indexes(request={},) + async_pager = await client.list_indexes( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1427,7 +1544,9 @@ async def test_list_indexes_async_pager(): @pytest.mark.asyncio async def test_list_indexes_async_pages(): - client = DatastoreAdminAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1436,15 +1555,28 @@ async def test_list_indexes_async_pages(): # Set the response to a series of pages. 
call.side_effect = ( datastore_admin.ListIndexesResponse( - indexes=[index.Index(), index.Index(), index.Index(),], + indexes=[ + index.Index(), + index.Index(), + index.Index(), + ], next_page_token="abc", ), - datastore_admin.ListIndexesResponse(indexes=[], next_page_token="def",), datastore_admin.ListIndexesResponse( - indexes=[index.Index(),], next_page_token="ghi", + indexes=[], + next_page_token="def", ), datastore_admin.ListIndexesResponse( - indexes=[index.Index(), index.Index(),], + indexes=[ + index.Index(), + ], + next_page_token="ghi", + ), + datastore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + index.Index(), + ], ), RuntimeError, ) @@ -1462,7 +1594,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -1482,7 +1615,10 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = DatastoreAdminClient(client_options=options, transport=transport,) + client = DatastoreAdminClient( + client_options=options, + transport=transport, + ) # It is an error to provide an api_key and a credential. options = mock.Mock() @@ -1498,7 +1634,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = DatastoreAdminClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -1543,8 +1680,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.DatastoreAdminGrpcTransport,) + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DatastoreAdminGrpcTransport, + ) def test_datastore_admin_base_transport_error(): @@ -1599,7 +1741,8 @@ def test_datastore_admin_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.DatastoreAdminTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -1769,7 +1912,8 @@ def test_datastore_admin_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.DatastoreAdminGrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1781,7 +1925,8 @@ def test_datastore_admin_grpc_asyncio_transport_channel(): # Check that channel is used if provided. 
transport = transports.DatastoreAdminGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1890,12 +2035,16 @@ def test_datastore_admin_transport_channel_mtls_with_adc(transport_class): def test_datastore_admin_grpc_lro_client(): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -1903,12 +2052,16 @@ def test_datastore_admin_grpc_lro_client(): def test_datastore_admin_grpc_lro_async_client(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -1936,7 +2089,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = DatastoreAdminClient.common_folder_path(folder) assert expected == actual @@ -1954,7 +2109,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = DatastoreAdminClient.common_organization_path(organization) assert expected == actual @@ -1972,7 +2129,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = DatastoreAdminClient.common_project_path(project) assert expected == actual @@ -1992,7 +2151,8 @@ def test_common_location_path(): project = "winkle" location = "nautilus" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = DatastoreAdminClient.common_location_path(project, location) assert expected == actual @@ -2017,7 +2177,8 @@ def test_client_with_default_client_info(): transports.DatastoreAdminTransport, "_prep_wrapped_messages" ) as prep: client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2026,7 +2187,8 @@ def test_client_with_default_client_info(): ) as prep: transport_class = DatastoreAdminClient.get_transport_class() transport = 
transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2034,7 +2196,8 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" diff --git a/tests/unit/gapic/datastore_v1/test_datastore.py b/tests/unit/gapic/datastore_v1/test_datastore.py index fee5a408..445f96fa 100644 --- a/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/tests/unit/gapic/datastore_v1/test_datastore.py @@ -84,7 +84,13 @@ def test__get_default_mtls_endpoint(): assert DatastoreClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi -@pytest.mark.parametrize("client_class", [DatastoreClient, DatastoreAsyncClient,]) +@pytest.mark.parametrize( + "client_class", + [ + DatastoreClient, + DatastoreAsyncClient, + ], +) def test_datastore_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( @@ -124,7 +130,13 @@ def test_datastore_client_service_account_always_use_jwt( use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class", [DatastoreClient, DatastoreAsyncClient,]) +@pytest.mark.parametrize( + "client_class", + [ + DatastoreClient, + DatastoreAsyncClient, + ], +) def test_datastore_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( @@ -477,7 +489,9 @@ def test_datastore_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -608,10 +622,17 @@ def test_datastore_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [datastore.LookupRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore.LookupRequest, + dict, + ], +) def test_lookup(request_type, transport: str = "grpc"): client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -637,7 +658,8 @@ def test_lookup_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
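
One stubbing pattern carries all of these unit tests: the transport exposes each RPC as a callable wrapper object, so patching __call__ on that wrapper's type intercepts the RPC without ever opening a channel. Condensed from the tests above, reusing this module's mock, ga_credentials, and datastore imports:

client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials())

with mock.patch.object(type(client.transport.lookup), "__call__") as call:
    call.return_value = datastore.LookupResponse()
    response = client.lookup(request=datastore.LookupRequest())

# The stub saw exactly one request, and nothing touched the network.
call.assert_called_once()
assert isinstance(response, datastore.LookupResponse)
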
@@ -653,7 +675,8 @@ async def test_lookup_async( transport: str = "grpc_asyncio", request_type=datastore.LookupRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -683,7 +706,9 @@ async def test_lookup_async_from_dict(): def test_lookup_flattened(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.lookup), "__call__") as call: @@ -723,7 +748,9 @@ def test_lookup_flattened(): def test_lookup_flattened_error(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -744,7 +771,9 @@ def test_lookup_flattened_error(): @pytest.mark.asyncio async def test_lookup_flattened_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.lookup), "__call__") as call: @@ -789,7 +818,9 @@ async def test_lookup_flattened_async(): @pytest.mark.asyncio async def test_lookup_flattened_error_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -808,10 +839,17 @@ async def test_lookup_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [datastore.RunQueryRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore.RunQueryRequest, + dict, + ], +) def test_run_query(request_type, transport: str = "grpc"): client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -837,7 +875,8 @@ def test_run_query_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
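
Outside the mocks, the same flattened lookup call reads entities by key. A sketch against a live backend, assuming default credentials and treating every identifier below as a placeholder:

from google.cloud import datastore_v1

client = datastore_v1.DatastoreClient()
key = datastore_v1.Key(
    partition_id=datastore_v1.PartitionId(project_id="my-project"),
    path=[datastore_v1.Key.PathElement(kind="Task", name="sample-task")],
)
response = client.lookup(project_id="my-project", keys=[key])
for found in response.found:
    print(found.entity.key)  # entities that existed for the requested keys
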
@@ -853,7 +892,8 @@ async def test_run_query_async( transport: str = "grpc_asyncio", request_type=datastore.RunQueryRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -882,10 +922,17 @@ async def test_run_query_async_from_dict(): await test_run_query_async(request_type=dict) -@pytest.mark.parametrize("request_type", [datastore.BeginTransactionRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore.BeginTransactionRequest, + dict, + ], +) def test_begin_transaction(request_type, transport: str = "grpc"): client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -916,7 +963,8 @@ def test_begin_transaction_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -934,7 +982,8 @@ async def test_begin_transaction_async( transport: str = "grpc_asyncio", request_type=datastore.BeginTransactionRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -947,7 +996,9 @@ async def test_begin_transaction_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.BeginTransactionResponse(transaction=b"transaction_blob",) + datastore.BeginTransactionResponse( + transaction=b"transaction_blob", + ) ) response = await client.begin_transaction(request) @@ -967,7 +1018,9 @@ async def test_begin_transaction_async_from_dict(): def test_begin_transaction_flattened(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -977,7 +1030,9 @@ def test_begin_transaction_flattened(): call.return_value = datastore.BeginTransactionResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.begin_transaction(project_id="project_id_value",) + client.begin_transaction( + project_id="project_id_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -989,19 +1044,24 @@ def test_begin_transaction_flattened(): def test_begin_transaction_flattened_error(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.begin_transaction( - datastore.BeginTransactionRequest(), project_id="project_id_value", + datastore.BeginTransactionRequest(), + project_id="project_id_value", ) @pytest.mark.asyncio async def test_begin_transaction_flattened_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1015,7 +1075,9 @@ async def test_begin_transaction_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.begin_transaction(project_id="project_id_value",) + response = await client.begin_transaction( + project_id="project_id_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1028,20 +1090,30 @@ async def test_begin_transaction_flattened_async(): @pytest.mark.asyncio async def test_begin_transaction_flattened_error_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.begin_transaction( - datastore.BeginTransactionRequest(), project_id="project_id_value", + datastore.BeginTransactionRequest(), + project_id="project_id_value", ) -@pytest.mark.parametrize("request_type", [datastore.CommitRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore.CommitRequest, + dict, + ], +) def test_commit(request_type, transport: str = "grpc"): client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1051,7 +1123,9 @@ def test_commit(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.commit), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = datastore.CommitResponse(index_updates=1389,) + call.return_value = datastore.CommitResponse( + index_updates=1389, + ) response = client.commit(request) # Establish that the underlying gRPC stub method was called. @@ -1068,7 +1142,8 @@ def test_commit_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1084,7 +1159,8 @@ async def test_commit_async( transport: str = "grpc_asyncio", request_type=datastore.CommitRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1095,7 +1171,9 @@ async def test_commit_async( with mock.patch.object(type(client.transport.commit), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.CommitResponse(index_updates=1389,) + datastore.CommitResponse( + index_updates=1389, + ) ) response = await client.commit(request) @@ -1115,7 +1193,9 @@ async def test_commit_async_from_dict(): def test_commit_flattened(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.commit), "__call__") as call: @@ -1165,7 +1245,9 @@ def test_commit_flattened(): def test_commit_flattened_error(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1191,7 +1273,9 @@ def test_commit_flattened_error(): @pytest.mark.asyncio async def test_commit_flattened_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.commit), "__call__") as call: @@ -1246,7 +1330,9 @@ async def test_commit_flattened_async(): @pytest.mark.asyncio async def test_commit_flattened_error_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1270,10 +1356,17 @@ async def test_commit_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [datastore.RollbackRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore.RollbackRequest, + dict, + ], +) def test_rollback(request_type, transport: str = "grpc"): client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1299,7 +1392,8 @@ def test_rollback_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
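
Taken together, the begin_transaction, commit, and rollback cases above trace the v1 transaction lifecycle. A sketch under the same placeholder project, continuing the hypothetical client from the lookup sketch:

txn = client.begin_transaction(project_id="my-project").transaction

try:
    client.commit(
        project_id="my-project",
        mode=datastore_v1.CommitRequest.Mode.TRANSACTIONAL,
        transaction=txn,
        mutations=[],  # a no-op commit; real code would upsert/delete here
    )
except Exception:
    # On failure, release the transaction's locks explicitly.
    client.rollback(project_id="my-project", transaction=txn)
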
@@ -1315,7 +1409,8 @@ async def test_rollback_async( transport: str = "grpc_asyncio", request_type=datastore.RollbackRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1345,7 +1440,9 @@ async def test_rollback_async_from_dict(): def test_rollback_flattened(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.rollback), "__call__") as call: @@ -1354,7 +1451,8 @@ def test_rollback_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.rollback( - project_id="project_id_value", transaction=b"transaction_blob", + project_id="project_id_value", + transaction=b"transaction_blob", ) # Establish that the underlying call was made with the expected @@ -1370,7 +1468,9 @@ def test_rollback_flattened(): def test_rollback_flattened_error(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1384,7 +1484,9 @@ def test_rollback_flattened_error(): @pytest.mark.asyncio async def test_rollback_flattened_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.rollback), "__call__") as call: @@ -1397,7 +1499,8 @@ async def test_rollback_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.rollback( - project_id="project_id_value", transaction=b"transaction_blob", + project_id="project_id_value", + transaction=b"transaction_blob", ) # Establish that the underlying call was made with the expected @@ -1414,7 +1517,9 @@ async def test_rollback_flattened_async(): @pytest.mark.asyncio async def test_rollback_flattened_error_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1426,10 +1531,17 @@ async def test_rollback_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [datastore.AllocateIdsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore.AllocateIdsRequest, + dict, + ], +) def test_allocate_ids(request_type, transport: str = "grpc"): client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1455,7 +1567,8 @@ def test_allocate_ids_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1471,7 +1584,8 @@ async def test_allocate_ids_async( transport: str = "grpc_asyncio", request_type=datastore.AllocateIdsRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1501,7 +1615,9 @@ async def test_allocate_ids_async_from_dict(): def test_allocate_ids_flattened(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: @@ -1533,7 +1649,9 @@ def test_allocate_ids_flattened(): def test_allocate_ids_flattened_error(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1551,7 +1669,9 @@ def test_allocate_ids_flattened_error(): @pytest.mark.asyncio async def test_allocate_ids_flattened_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: @@ -1588,7 +1708,9 @@ async def test_allocate_ids_flattened_async(): @pytest.mark.asyncio async def test_allocate_ids_flattened_error_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1604,10 +1726,17 @@ async def test_allocate_ids_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [datastore.ReserveIdsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore.ReserveIdsRequest, + dict, + ], +) def test_reserve_ids(request_type, transport: str = "grpc"): client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1633,7 +1762,8 @@ def test_reserve_ids_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
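
The last two methods complete the ID-management surface these tests cover: allocate_ids asks the service to complete incomplete keys, while reserve_ids marks caller-chosen IDs as used so automatic allocation will not collide with them. A sketch with the same placeholder names:

incomplete = datastore_v1.Key(
    partition_id=datastore_v1.PartitionId(project_id="my-project"),
    path=[datastore_v1.Key.PathElement(kind="Task")],  # no id/name yet
)
allocated = client.allocate_ids(project_id="my-project", keys=[incomplete])
print([k.path[-1].id for k in allocated.keys])  # server-assigned numeric IDs

reserved = datastore_v1.Key(
    partition_id=datastore_v1.PartitionId(project_id="my-project"),
    path=[datastore_v1.Key.PathElement(kind="Task", id=12345)],
)
client.reserve_ids(project_id="my-project", keys=[reserved])
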
@@ -1649,7 +1779,8 @@ async def test_reserve_ids_async( transport: str = "grpc_asyncio", request_type=datastore.ReserveIdsRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1679,7 +1810,9 @@ async def test_reserve_ids_async_from_dict(): def test_reserve_ids_flattened(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: @@ -1711,7 +1844,9 @@ def test_reserve_ids_flattened(): def test_reserve_ids_flattened_error(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1729,7 +1864,9 @@ def test_reserve_ids_flattened_error(): @pytest.mark.asyncio async def test_reserve_ids_flattened_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: @@ -1766,7 +1903,9 @@ async def test_reserve_ids_flattened_async(): @pytest.mark.asyncio async def test_reserve_ids_flattened_error_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1789,7 +1928,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -1809,7 +1949,10 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = DatastoreClient(client_options=options, transport=transport,) + client = DatastoreClient( + client_options=options, + transport=transport, + ) # It is an error to provide an api_key and a credential. options = mock.Mock() @@ -1825,7 +1968,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = DatastoreClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -1855,7 +1999,10 @@ def test_transport_get_channel(): @pytest.mark.parametrize( "transport_class", - [transports.DatastoreGrpcTransport, transports.DatastoreGrpcAsyncIOTransport,], + [ + transports.DatastoreGrpcTransport, + transports.DatastoreGrpcAsyncIOTransport, + ], ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. @@ -1867,8 +2014,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
- client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.DatastoreGrpcTransport,) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DatastoreGrpcTransport, + ) def test_datastore_base_transport_error(): @@ -1919,7 +2071,8 @@ def test_datastore_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.DatastoreTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -1960,7 +2113,10 @@ def test_datastore_auth_adc(): @pytest.mark.parametrize( "transport_class", - [transports.DatastoreGrpcTransport, transports.DatastoreGrpcAsyncIOTransport,], + [ + transports.DatastoreGrpcTransport, + transports.DatastoreGrpcAsyncIOTransport, + ], ) def test_datastore_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use @@ -2083,7 +2239,8 @@ def test_datastore_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.DatastoreGrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2095,7 +2252,8 @@ def test_datastore_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.DatastoreGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2216,7 +2374,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = DatastoreClient.common_folder_path(folder) assert expected == actual @@ -2234,7 +2394,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = DatastoreClient.common_organization_path(organization) assert expected == actual @@ -2252,7 +2414,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = DatastoreClient.common_project_path(project) assert expected == actual @@ -2272,7 +2436,8 @@ def test_common_location_path(): project = "winkle" location = "nautilus" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = DatastoreClient.common_location_path(project, location) assert expected == actual @@ -2297,7 +2462,8 @@ def test_client_with_default_client_info(): transports.DatastoreTransport, "_prep_wrapped_messages" ) as prep: client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) 
prep.assert_called_once_with(client_info) @@ -2306,7 +2472,8 @@ def test_client_with_default_client_info(): ) as prep: transport_class = DatastoreClient.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2314,7 +2481,8 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" diff --git a/tests/unit/test__gapic.py b/tests/unit/test__gapic.py index e7f0b690..b72a68b5 100644 --- a/tests/unit/test__gapic.py +++ b/tests/unit/test__gapic.py @@ -48,7 +48,9 @@ def test_live_api(make_chan, mock_transport, mock_klass): mock_transport.assert_called_once_with(channel=mock.sentinel.channel) make_chan.assert_called_once_with( - mock.sentinel.credentials, DEFAULT_USER_AGENT, "datastore.googleapis.com:443", + mock.sentinel.credentials, + DEFAULT_USER_AGENT, + "datastore.googleapis.com:443", ) mock_klass.assert_called_once_with( diff --git a/tests/unit/test__http.py b/tests/unit/test__http.py index 67f28ffe..a03397d5 100644 --- a/tests/unit/test__http.py +++ b/tests/unit/test__http.py @@ -240,7 +240,11 @@ def test_api_ctor(): def _lookup_single_helper( - read_consistency=None, transaction=None, empty=True, retry=None, timeout=None, + read_consistency=None, + transaction=None, + empty=True, + retry=None, + timeout=None, ): from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore_v1.types import entity as entity_pb2 @@ -295,7 +299,11 @@ def _lookup_single_helper( uri = _build_expected_url(client._base_url, project, "lookup") request = _verify_protobuf_call( - http, uri, datastore_pb2.LookupRequest(), retry=retry, timeout=timeout, + http, + uri, + datastore_pb2.LookupRequest(), + retry=retry, + timeout=timeout, ) if retry is not None: @@ -336,7 +344,11 @@ def test_api_lookup_single_key_hit_w_timeout(): def _lookup_multiple_helper( - found=0, missing=0, deferred=0, retry=None, timeout=None, + found=0, + missing=0, + deferred=0, + retry=None, + timeout=None, ): from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore_v1.types import entity as entity_pb2 @@ -399,7 +411,11 @@ def _lookup_multiple_helper( uri = _build_expected_url(client._base_url, project, "lookup") request = _verify_protobuf_call( - http, uri, datastore_pb2.LookupRequest(), retry=retry, timeout=timeout, + http, + uri, + datastore_pb2.LookupRequest(), + retry=retry, + timeout=timeout, ) assert list(request.keys) == [key_pb1._pb, key_pb2._pb] assert request.read_options == read_options._pb @@ -499,7 +515,11 @@ def _run_query_helper( uri = _build_expected_url(client._base_url, project, "runQuery") request = _verify_protobuf_call( - http, uri, datastore_pb2.RunQueryRequest(), retry=retry, timeout=timeout, + http, + uri, + datastore_pb2.RunQueryRequest(), + retry=retry, + timeout=timeout, ) assert request.partition_id == partition_id._pb assert request.query == query_pb._pb @@ -615,7 +635,7 @@ def _commit_helper(transaction=None, retry=None, timeout=None): insert = mutation.upsert insert.key.CopyFrom(key_pb._pb) value_pb = _new_value_pb(insert, 
"foo") - value_pb.string_value = u"Foo" + value_pb.string_value = "Foo" http = _make_requests_session( [_make_response(content=rsp_pb._pb.SerializeToString())] @@ -647,7 +667,11 @@ def _commit_helper(transaction=None, retry=None, timeout=None): uri = _build_expected_url(client._base_url, project, "commit") request = _verify_protobuf_call( - http, uri, rq_class(), retry=retry, timeout=timeout, + http, + uri, + rq_class(), + retry=retry, + timeout=timeout, ) assert list(request.mutations) == [mutation] assert request.mode == mode @@ -709,7 +733,11 @@ def _rollback_helper(retry=None, timeout=None): uri = _build_expected_url(client._base_url, project, "rollback") request = _verify_protobuf_call( - http, uri, datastore_pb2.RollbackRequest(), retry=retry, timeout=timeout, + http, + uri, + datastore_pb2.RollbackRequest(), + retry=retry, + timeout=timeout, ) assert request.transaction == transaction @@ -765,7 +793,11 @@ def _allocate_ids_helper(count=0, retry=None, timeout=None): uri = _build_expected_url(client._base_url, project, "allocateIds") request = _verify_protobuf_call( - http, uri, datastore_pb2.AllocateIdsRequest(), retry=retry, timeout=timeout, + http, + uri, + datastore_pb2.AllocateIdsRequest(), + retry=retry, + timeout=timeout, ) assert len(request.keys) == len(before_key_pbs) for key_before, key_after in zip(before_key_pbs, request.keys): @@ -822,7 +854,11 @@ def _reserve_ids_helper(count=0, retry=None, timeout=None): uri = _build_expected_url(client._base_url, project, "reserveIds") request = _verify_protobuf_call( - http, uri, datastore_pb2.AllocateIdsRequest(), retry=retry, timeout=timeout, + http, + uri, + datastore_pb2.AllocateIdsRequest(), + retry=retry, + timeout=timeout, ) assert len(request.keys) == len(before_key_pbs) for key_before, key_after in zip(before_key_pbs, request.keys): diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index da253deb..51cddb6a 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -130,7 +130,8 @@ def test_client_ctor_w_implicit_inputs(): other = "other" patch1 = mock.patch( - "google.cloud.datastore.client._determine_default_project", return_value=other, + "google.cloud.datastore.client._determine_default_project", + return_value=other, ) creds = _make_credentials() @@ -151,7 +152,9 @@ def test_client_ctor_w_implicit_inputs(): assert client.current_batch is None assert client.current_transaction is None - default.assert_called_once_with(scopes=Client.SCOPE,) + default.assert_called_once_with( + scopes=Client.SCOPE, + ) _determine_default_project.assert_called_once_with(None) @@ -258,7 +261,10 @@ def test_client_base_url_property_w_client_options(): creds = _make_credentials() client_options = {"api_endpoint": "endpoint"} - client = _make_client(credentials=creds, client_options=client_options,) + client = _make_client( + credentials=creds, + client_options=client_options, + ) assert client.base_url == "endpoint" client.base_url = alternate_url @@ -784,7 +790,7 @@ def test_client_put_multi_w_single_empty_entity(): def test_client_put_multi_no_batch_w_partial_key_w_retry_w_timeout(): from google.cloud.datastore_v1.types import datastore as datastore_pb2 - entity = _Entity(foo=u"bar") + entity = _Entity(foo="bar") key = entity.key = _Key(_Key.kind, None) retry = mock.Mock() timeout = 100000 @@ -817,13 +823,13 @@ def test_client_put_multi_no_batch_w_partial_key_w_retry_w_timeout(): assert len(prop_list) == 1 name, value_pb = prop_list[0] assert name == "foo" - assert value_pb.string_value == u"bar" + assert 
value_pb.string_value == "bar" def test_client_put_multi_existing_batch_w_completed_key(): creds = _make_credentials() client = _make_client(credentials=creds) - entity = _Entity(foo=u"bar") + entity = _Entity(foo="bar") key = entity.key = _Key() with _NoCommitBatch(client) as CURR_BATCH: @@ -837,7 +843,7 @@ def test_client_put_multi_existing_batch_w_completed_key(): assert len(prop_list) == 1 name, value_pb = prop_list[0] assert name == "foo" - assert value_pb.string_value == u"bar" + assert value_pb.string_value == "bar" def test_client_delete(): diff --git a/tests/unit/test_helpers.py b/tests/unit/test_helpers.py index 4c1861a2..a8477f2d 100644 --- a/tests/unit/test_helpers.py +++ b/tests/unit/test_helpers.py @@ -131,7 +131,7 @@ def test_entity_from_protobuf_w_entity_with_meaning(): name = "hello" value_pb = _new_value_pb(entity_pb, name) value_pb.meaning = meaning = 9 - value_pb.string_value = val = u"something" + value_pb.string_value = val = "something" entity = entity_from_protobuf(entity_pb) assert entity.key is None @@ -249,7 +249,7 @@ def test_enity_to_protobf_w_simple_fields(): name1 = "foo" entity[name1] = value1 = 42 name2 = "bar" - entity[name2] = value2 = u"some-string" + entity[name2] = value2 = "some-string" entity_pb = entity_to_protobuf(entity) expected_pb = entity_pb2.Entity() @@ -299,7 +299,7 @@ def test_enity_to_protobf_w_inverts_to_protobuf(): val_pb1.exclude_from_indexes = True # Add a string property. val_pb2 = _new_value_pb(original_pb, "bar") - val_pb2.string_value = u"hello" + val_pb2.string_value = "hello" # Add a nested (entity) property. val_pb3 = _new_value_pb(original_pb, "entity-baz") @@ -386,7 +386,7 @@ def test_enity_to_protobf_w_dict_to_entity(): from google.cloud.datastore.helpers import entity_to_protobuf entity = Entity() - entity["a"] = {"b": u"c"} + entity["a"] = {"b": "c"} entity_pb = entity_to_protobuf(entity) expected_pb = entity_pb2.Entity( @@ -624,9 +624,9 @@ def test__pb_attr_value_w_bytes(): def test__pb_attr_value_w_unicode(): from google.cloud.datastore.helpers import _pb_attr_value - name, value = _pb_attr_value(u"str") + name, value = _pb_attr_value("str") assert name == "string_value" - assert value == u"str" + assert value == "str" def test__pb_attr_value_w_entity(): @@ -758,8 +758,8 @@ def test__get_value_from_value_pb_w_bytes(): def test__get_value_from_value_pb_w_unicode(): from google.cloud.datastore.helpers import _get_value_from_value_pb - value = _make_value_pb("string_value", u"str") - assert _get_value_from_value_pb(value._pb) == u"str" + value = _make_value_pb("string_value", "str") + assert _get_value_from_value_pb(value._pb) == "str" def test__get_value_from_value_pb_w_entity(): @@ -929,9 +929,9 @@ def test__set_protobuf_value_w_unicode(): from google.cloud.datastore.helpers import _set_protobuf_value pb = _make_empty_value_pb() - _set_protobuf_value(pb, u"str") + _set_protobuf_value(pb, "str") value = pb.string_value - assert value == u"str" + assert value == "str" def test__set_protobuf_value_w_entity_empty_wo_key(): @@ -952,7 +952,7 @@ def test__set_protobuf_value_w_entity_w_key(): from google.cloud.datastore.helpers import _set_protobuf_value name = "foo" - value = u"Foo" + value = "Foo" pb = _make_empty_value_pb() key = Key("KIND", 123, project="PROJECT") entity = Entity(key=key) @@ -971,7 +971,7 @@ def test__set_protobuf_value_w_array(): from google.cloud.datastore.helpers import _set_protobuf_value pb = _make_empty_value_pb() - values = [u"a", 0, 3.14] + values = ["a", 0, 3.14] _set_protobuf_value(pb, values) 
marshalled = pb.array_value.values assert len(marshalled) == len(values) @@ -1009,7 +1009,7 @@ def test__get_meaning_w_single(): value_pb = entity_pb2.Value() value_pb.meaning = meaning = 22 - value_pb.string_value = u"hi" + value_pb.string_value = "hi" result = _get_meaning(value_pb) assert meaning == result @@ -1036,8 +1036,8 @@ def test__get_meaning_w_array_value(): sub_value_pb2 = value_pb._pb.array_value.values.add() sub_value_pb1.meaning = sub_value_pb2.meaning = meaning - sub_value_pb1.string_value = u"hi" - sub_value_pb2.string_value = u"bye" + sub_value_pb1.string_value = "hi" + sub_value_pb2.string_value = "bye" result = _get_meaning(value_pb, is_list=True) assert meaning == result @@ -1055,8 +1055,8 @@ def test__get_meaning_w_array_value_multiple_meanings(): sub_value_pb1.meaning = meaning1 sub_value_pb2.meaning = meaning2 - sub_value_pb1.string_value = u"hi" - sub_value_pb2.string_value = u"bye" + sub_value_pb1.string_value = "hi" + sub_value_pb2.string_value = "bye" result = _get_meaning(value_pb, is_list=True) assert result == [meaning1, meaning2] @@ -1072,8 +1072,8 @@ def test__get_meaning_w_array_value_meaning_partially_unset(): sub_value_pb2 = value_pb._pb.array_value.values.add() sub_value_pb1.meaning = meaning1 - sub_value_pb1.string_value = u"hi" - sub_value_pb2.string_value = u"bye" + sub_value_pb1.string_value = "hi" + sub_value_pb2.string_value = "bye" result = _get_meaning(value_pb, is_list=True) assert result == [meaning1, None] diff --git a/tests/unit/test_key.py b/tests/unit/test_key.py index 2d2a88e7..575601f0 100644 --- a/tests/unit/test_key.py +++ b/tests/unit/test_key.py @@ -72,7 +72,10 @@ def test_key_ctor_parent(): {"kind": _CHILD_KIND, "id": _CHILD_ID}, ] parent_key = _make_key( - _PARENT_KIND, _PARENT_ID, project=_PARENT_PROJECT, namespace=_PARENT_NAMESPACE, + _PARENT_KIND, + _PARENT_ID, + project=_PARENT_PROJECT, + namespace=_PARENT_NAMESPACE, ) key = _make_key(_CHILD_KIND, _CHILD_ID, parent=parent_key) assert key.project == parent_key.project @@ -97,7 +100,11 @@ def test_key_ctor_parent_bad_namespace(): parent_key = _make_key("KIND", 1234, namespace="FOO", project=_DEFAULT_PROJECT) with pytest.raises(ValueError): _make_key( - "KIND2", 1234, namespace="BAR", parent=parent_key, PROJECT=_DEFAULT_PROJECT, + "KIND2", + 1234, + namespace="BAR", + parent=parent_key, + PROJECT=_DEFAULT_PROJECT, ) @@ -585,7 +592,7 @@ def test__cliean_app_w_dev_server(): def test__get_empty_w_unset(): from google.cloud.datastore.key import _get_empty - for empty_value in (u"", 0, 0.0, []): + for empty_value in ("", 0, 0.0, []): ret_val = _get_empty(empty_value, empty_value) assert ret_val is None @@ -593,7 +600,7 @@ def test__get_empty_w_unset(): def test__get_empty_w_actually_set(): from google.cloud.datastore.key import _get_empty - value_pairs = ((u"hello", u""), (10, 0), (3.14, 0.0), (["stuff", "here"], [])) + value_pairs = (("hello", ""), (10, 0), (3.14, 0.0), (["stuff", "here"], [])) for value, empty_value in value_pairs: ret_val = _get_empty(value, empty_value) assert ret_val is value @@ -602,7 +609,7 @@ def test__get_empty_w_actually_set(): def test__check_database_id_w_empty_value(): from google.cloud.datastore.key import _check_database_id - ret_val = _check_database_id(u"") + ret_val = _check_database_id("") # Really we are just happy there was no exception. 
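
The long run of mechanical edits in these test files comes from black's "magic trailing comma" behavior, which the jump from black 19.10b0 to 22.3.0 picks up: any call or literal that already ends in a trailing comma is exploded to one element per line rather than packed onto a single line. A distilled before/after sketch of the pattern repeated throughout the hunks above:

    # black 19.10b0 left this packed on one line
    client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),)

    # black 22.3.0 honors the trailing comma and explodes the call
    client = DatastoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

The same upgrade also drops the now-redundant u string prefix, which accounts for the many u"str" -> "str" edits in these files.
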
assert ret_val is None @@ -611,7 +618,7 @@ def test__check_database_id_w_failure(): from google.cloud.datastore.key import _check_database_id with pytest.raises(ValueError): - _check_database_id(u"some-database-id") + _check_database_id("some-database-id") def test__add_id_or_name_add_id(): diff --git a/tests/unit/test_transaction.py b/tests/unit/test_transaction.py index 648ae7e4..3e78a6a3 100644 --- a/tests/unit/test_transaction.py +++ b/tests/unit/test_transaction.py @@ -144,7 +144,9 @@ def test_transaction_begin_w_retry_w_timeout(): expected_request = _make_begin_request(project) ds_api.begin_transaction.assert_called_once_with( - request=expected_request, retry=retry, timeout=timeout, + request=expected_request, + retry=retry, + timeout=timeout, ) From 1dc9480c3c170d9f2f411b8935ad4aa4b9c23432 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 30 Mar 2022 16:48:25 +0000 Subject: [PATCH 03/15] chore(python): add E231 to .flake8 ignore list (#291) Source-Link: https://github.com/googleapis/synthtool/commit/7ff4aad2ec5af0380e8bd6da1fa06eaadf24ec81 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:462782b0b492346b2d9099aaff52206dd30bc8e031ea97082e6facecc2373244 --- .flake8 | 2 +- .github/.OwlBot.lock.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.flake8 b/.flake8 index 29227d4c..2e438749 100644 --- a/.flake8 +++ b/.flake8 @@ -16,7 +16,7 @@ # Generated by synthtool. DO NOT EDIT! [flake8] -ignore = E203, E266, E501, W503 +ignore = E203, E231, E266, E501, W503 exclude = # Exclude generated code. **/proto/** diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 87dd0061..9e0a9356 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe + digest: sha256:462782b0b492346b2d9099aaff52206dd30bc8e031ea97082e6facecc2373244 From 29300dbfb38956cf453d551d96f6d4df096d4839 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Apr 2022 00:22:25 +0000 Subject: [PATCH 04/15] chore(python): update .pre-commit-config.yaml to use black==22.3.0 (#292) Source-Link: https://github.com/googleapis/synthtool/commit/7804ade3daae0d66649bee8df6c55484c6580b8d Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:eede5672562a32821444a8e803fb984a6f61f2237ea3de229d2de24453f4ae7d --- .github/.OwlBot.lock.yaml | 3 ++- .pre-commit-config.yaml | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 9e0a9356..22cc254a 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the License. 
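
For reference, flake8's E231 check is "missing whitespace after ',', ';', or ':'" — spacing that black now owns outright. Two hypothetical one-liners (not from this repo) that a plain flake8 run would flag without the expanded ignore list:

    d = {"a":1}   # E231 missing whitespace after ':'
    f(1,2)        # E231 missing whitespace after ','

Since black rewrites such spacing itself, the check is redundant under an all-black workflow, and synthtool folds E231 into the existing ignore set alongside E203, E266, E501, and W503.
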
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:462782b0b492346b2d9099aaff52206dd30bc8e031ea97082e6facecc2373244 + digest: sha256:eede5672562a32821444a8e803fb984a6f61f2237ea3de229d2de24453f4ae7d +# created: 2022-03-30T23:44:26.560599165Z diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 62eb5a77..46d23716 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -22,7 +22,7 @@ repos: - id: end-of-file-fixer - id: check-yaml - repo: https://github.com/psf/black - rev: 19.10b0 + rev: 22.3.0 hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 From 97c1383fb4f2b22800e3e4f54e083cf1cbd5716f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Apr 2022 02:10:30 +0000 Subject: [PATCH 05/15] chore(python): Enable size-label bot (#293) Source-Link: https://github.com/googleapis/synthtool/commit/06e82790dd719a165ad32b8a06f8f6ec3e3cae0f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:b3500c053313dc34e07b1632ba9e4e589f4f77036a7cf39e1fe8906811ae0fce --- .github/.OwlBot.lock.yaml | 4 ++-- .github/auto-label.yaml | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 .github/auto-label.yaml diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 22cc254a..58a0b153 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:eede5672562a32821444a8e803fb984a6f61f2237ea3de229d2de24453f4ae7d -# created: 2022-03-30T23:44:26.560599165Z + digest: sha256:b3500c053313dc34e07b1632ba9e4e589f4f77036a7cf39e1fe8906811ae0fce +# created: 2022-04-01T01:42:03.609279246Z diff --git a/.github/auto-label.yaml b/.github/auto-label.yaml new file mode 100644 index 00000000..09c8d735 --- /dev/null +++ b/.github/auto-label.yaml @@ -0,0 +1,2 @@ +requestsize: + enabled: true From d6569697c4e112c78d4dd5ff9173af9cc32904dc Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Apr 2022 19:40:21 +0000 Subject: [PATCH 06/15] chore(python): refactor unit / system test dependency install (#294) Source-Link: https://github.com/googleapis/synthtool/commit/993985f0fc4b37152e588f0549bcbdaf34666023 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:1894490910e891a385484514b22eb5133578897eb5b3c380e6d8ad475c6647cd --- .github/.OwlBot.lock.yaml | 4 +- noxfile.py | 105 ++++++++++++++++++++++++++++++-------- 2 files changed, 87 insertions(+), 22 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 58a0b153..fa576229 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:b3500c053313dc34e07b1632ba9e4e589f4f77036a7cf39e1fe8906811ae0fce -# created: 2022-04-01T01:42:03.609279246Z + digest: sha256:1894490910e891a385484514b22eb5133578897eb5b3c380e6d8ad475c6647cd +# created: 2022-04-01T15:48:07.524222836Z diff --git a/noxfile.py b/noxfile.py index b8b09a16..975a93e3 100644 --- a/noxfile.py +++ b/noxfile.py @@ -20,16 +20,40 @@ import os import pathlib import shutil +import warnings import nox - BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] + UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -93,23 +117,41 @@ def lint_setup_py(session): session.run("python", "setup.py", "check", "--restructuredtext", "--strict") +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + def default(session): # Install all test dependencies, then install this package in-place. constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - session.install( - "mock", - "asyncmock", - "pytest", - "pytest-cov", - "pytest-asyncio", - "-c", - constraints_path, - ) - - session.install("-e", ".", "-c", constraints_path) + install_unittest_dependencies(session, "-c", constraints_path) # Run py.test against the unit tests. session.run( @@ -133,6 +175,35 @@ def unit(session): default(session) +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. 
+ session.install("--pre", "grpcio") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) @nox.parametrize("disable_grpc", [False, True]) def system(session, disable_grpc): @@ -156,13 +227,7 @@ def system(session, disable_grpc): if not system_test_exists and not system_test_folder_exists: session.skip("System tests were not found") - # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") - - # Install all test dependencies, then install this package into the - # virtualenv's dist-packages. - session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path) - session.install("-e", ".", "-c", constraints_path) + install_systemtest_dependencies(session, "-c", constraints_path) env = {} if disable_grpc: From 13a13dfe2175ece685525bde75561c46970abdf6 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 4 Apr 2022 12:37:53 -0400 Subject: [PATCH 07/15] chore: allow releases on previous major versions (#295) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: allow releases on previous major versions * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .github/release-please.yml | 9 +++++++++ owlbot.py | 2 ++ 2 files changed, 11 insertions(+) diff --git a/.github/release-please.yml b/.github/release-please.yml index 466597e5..29601ad4 100644 --- a/.github/release-please.yml +++ b/.github/release-please.yml @@ -1,2 +1,11 @@ releaseType: python handleGHRelease: true +# NOTE: this section is generated by synthtool.languages.python +# See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py +branches: +- branch: v1 + handleGHRelease: true + releaseType: python +- branch: v0 + handleGHRelease: true + releaseType: python diff --git a/owlbot.py b/owlbot.py index fbf8c131..63214a43 100644 --- a/owlbot.py +++ b/owlbot.py @@ -107,6 +107,8 @@ def get_staging_dirs( python.py_samples(skip_readmes=True) +python.configure_previous_major_version_branches() + # Preserve system tests w/ GOOGLE_DISABLE_GRPC set (#133, PR #136) assert 1 == s.replace( "noxfile.py", From cff6a1c264b87601c159717a07e6dcf7a4670dc0 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 6 Apr 2022 06:59:02 -0400 Subject: [PATCH 08/15] chore(python): add license header to auto-label.yaml (#297) Source-Link: https://github.com/googleapis/synthtool/commit/eb78c980b52c7c6746d2edb77d9cf7aaa99a2aab Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:8a5d3f6a2e43ed8293f34e06a2f56931d1e88a2694c3bb11b15df4eb256ad163 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .github/auto-label.yaml | 13 +++++++++++++ 2 files changed, 15 
insertions(+), 2 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index fa576229..bc893c97 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:1894490910e891a385484514b22eb5133578897eb5b3c380e6d8ad475c6647cd -# created: 2022-04-01T15:48:07.524222836Z + digest: sha256:8a5d3f6a2e43ed8293f34e06a2f56931d1e88a2694c3bb11b15df4eb256ad163 +# created: 2022-04-06T10:30:21.687684602Z diff --git a/.github/auto-label.yaml b/.github/auto-label.yaml index 09c8d735..41bff0b5 100644 --- a/.github/auto-label.yaml +++ b/.github/auto-label.yaml @@ -1,2 +1,15 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. requestsize: enabled: true From 8d2bd1788d8dc7da57ab9272b274a29082878ece Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 12 Apr 2022 23:50:12 +0000 Subject: [PATCH 09/15] chore: Use gapic-generator-python 0.65.0 (#300) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 440970084 Source-Link: https://github.com/googleapis/googleapis/commit/5e0a3d57254ab9857ccac77fc6ffade7b69a2dc7 Source-Link: https://github.com/googleapis/googleapis-gen/commit/b0c628a3fade768f225d76992791ea1ba2a881be Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjBjNjI4YTNmYWRlNzY4ZjIyNWQ3Njk5Mjc5MWVhMWJhMmE4ODFiZSJ9 feat: expose new read_time API fields, currently only available in private preview docs: fix type in docstring for map fields PiperOrigin-RevId: 440914241 Source-Link: https://github.com/googleapis/googleapis/commit/0ed730f27474890a727a72bdc85e6d20715e2f87 Source-Link: https://github.com/googleapis/googleapis-gen/commit/b2e5ae97fd24f64af0fef1999dad14945fdc3663 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjJlNWFlOTdmZDI0ZjY0YWYwZmVmMTk5OWRhZDE0OTQ1ZmRjMzY2MyJ9 --- .../services/datastore_admin/async_client.py | 21 ++--- .../services/datastore_admin/client.py | 15 ++-- .../datastore_admin/transports/base.py | 11 ++- .../datastore_admin/transports/grpc.py | 4 + .../datastore_admin_v1/types/__init__.py | 4 +- .../types/datastore_admin.py | 6 +- .../services/datastore/async_client.py | 15 ++-- .../datastore_v1/services/datastore/client.py | 6 +- .../services/datastore/transports/base.py | 14 ++-- .../services/datastore/transports/grpc.py | 4 + google/cloud/datastore_v1/types/datastore.py | 69 +++++++++++++++- google/cloud/datastore_v1/types/entity.py | 2 +- google/cloud/datastore_v1/types/query.py | 30 ++++++- .../test_datastore_admin.py | 81 ++++++++++++++----- .../unit/gapic/datastore_v1/test_datastore.py | 75 +++++++++++++---- 15 files changed, 266 insertions(+), 91 deletions(-) diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index c6cd885c..122d1fe5 100644 --- 
a/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -267,7 +267,7 @@ async def export_entities( request: Union[datastore_admin.ExportEntitiesRequest, dict] = None, *, project_id: str = None, - labels: Dict[str, str] = None, + labels: Mapping[str, str] = None, entity_filter: datastore_admin.EntityFilter = None, output_url_prefix: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -285,7 +285,6 @@ async def export_entities( before completion it may leave partial data behind in Google Cloud Storage. - .. code-block:: python from google.cloud import datastore_admin_v1 @@ -321,7 +320,7 @@ def sample_export_entities(): This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (:class:`Dict[str, str]`): + labels (:class:`Mapping[str, str]`): Client-assigned labels. This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this @@ -434,7 +433,7 @@ async def import_entities( request: Union[datastore_admin.ImportEntitiesRequest, dict] = None, *, project_id: str = None, - labels: Dict[str, str] = None, + labels: Mapping[str, str] = None, input_url: str = None, entity_filter: datastore_admin.EntityFilter = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -449,7 +448,6 @@ async def import_entities( is possible that a subset of the data has already been imported to Cloud Datastore. - .. code-block:: python from google.cloud import datastore_admin_v1 @@ -485,7 +483,7 @@ def sample_import_entities(): This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (:class:`Dict[str, str]`): + labels (:class:`Mapping[str, str]`): Client-assigned labels. This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this @@ -621,7 +619,6 @@ async def create_index( Indexes with a single property cannot be created. - .. code-block:: python from google.cloud import datastore_admin_v1 @@ -716,7 +713,6 @@ async def delete_index( [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex] again. - .. code-block:: python from google.cloud import datastore_admin_v1 @@ -842,8 +838,7 @@ def sample_get_index(): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -875,7 +870,6 @@ async def list_indexes( the list of indexes and may occasionally return stale results. - .. 
code-block:: python from google.cloud import datastore_admin_v1 @@ -926,8 +920,7 @@ def sample_list_indexes(): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index 74bf49c4..8f5364a7 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib @@ -468,7 +468,7 @@ def export_entities( request: Union[datastore_admin.ExportEntitiesRequest, dict] = None, *, project_id: str = None, - labels: Dict[str, str] = None, + labels: Mapping[str, str] = None, entity_filter: datastore_admin.EntityFilter = None, output_url_prefix: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -486,7 +486,6 @@ def export_entities( before completion it may leave partial data behind in Google Cloud Storage. - .. code-block:: python from google.cloud import datastore_admin_v1 @@ -522,7 +521,7 @@ def sample_export_entities(): This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (Dict[str, str]): + labels (Mapping[str, str]): Client-assigned labels. This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this @@ -634,7 +633,7 @@ def import_entities( request: Union[datastore_admin.ImportEntitiesRequest, dict] = None, *, project_id: str = None, - labels: Dict[str, str] = None, + labels: Mapping[str, str] = None, input_url: str = None, entity_filter: datastore_admin.EntityFilter = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -649,7 +648,6 @@ def import_entities( is possible that a subset of the data has already been imported to Cloud Datastore. - .. code-block:: python from google.cloud import datastore_admin_v1 @@ -685,7 +683,7 @@ def sample_import_entities(): This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (Dict[str, str]): + labels (Mapping[str, str]): Client-assigned labels. This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this @@ -820,7 +818,6 @@ def create_index( Indexes with a single property cannot be created. - .. code-block:: python from google.cloud import datastore_admin_v1 @@ -916,7 +913,6 @@ def delete_index( [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex] again. - .. code-block:: python from google.cloud import datastore_admin_v1 @@ -1067,7 +1063,6 @@ def list_indexes( the list of indexes and may occasionally return stale results. - .. 
code-block:: python from google.cloud import datastore_admin_v1 diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index daa2096f..0cf9ac64 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -86,6 +86,7 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: host += ":443" @@ -153,8 +154,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -168,8 +168,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -248,5 +247,9 @@ def list_indexes( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("DatastoreAdminTransport",) diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index ba43c4b6..e4193366 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -506,5 +506,9 @@ def list_indexes( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("DatastoreAdminGrpcTransport",) diff --git a/google/cloud/datastore_admin_v1/types/__init__.py b/google/cloud/datastore_admin_v1/types/__init__.py index fbc4f65f..f194f3cf 100644 --- a/google/cloud/datastore_admin_v1/types/__init__.py +++ b/google/cloud/datastore_admin_v1/types/__init__.py @@ -31,7 +31,9 @@ Progress, OperationType, ) -from .index import Index +from .index import ( + Index, +) from .migration import ( MigrationProgressEvent, MigrationStateEvent, diff --git a/google/cloud/datastore_admin_v1/types/datastore_admin.py b/google/cloud/datastore_admin_v1/types/datastore_admin.py index a490fd93..82bacec1 100644 --- a/google/cloud/datastore_admin_v1/types/datastore_admin.py +++ b/google/cloud/datastore_admin_v1/types/datastore_admin.py @@ -64,7 +64,7 @@ class CommonMetadata(proto.Message): operation_type (google.cloud.datastore_admin_v1.types.OperationType): The type of the operation. Can be used as a filter in ListOperationsRequest. - labels (Dict[str, str]): + labels (Mapping[str, str]): The client-assigned labels which were provided when the operation was created. May also include additional labels. @@ -141,7 +141,7 @@ class ExportEntitiesRequest(proto.Message): project_id (str): Required. Project ID against which to make the request. - labels (Dict[str, str]): + labels (Mapping[str, str]): Client-assigned labels. entity_filter (google.cloud.datastore_admin_v1.types.EntityFilter): Description of what data from the project is @@ -199,7 +199,7 @@ class ImportEntitiesRequest(proto.Message): project_id (str): Required. Project ID against which to make the request. 
- labels (Dict[str, str]): + labels (Mapping[str, str]): Client-assigned labels. input_url (str): Required. The full resource URL of the external storage diff --git a/google/cloud/datastore_v1/services/datastore/async_client.py b/google/cloud/datastore_v1/services/datastore/async_client.py index 247a388c..a4c41543 100644 --- a/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/google/cloud/datastore_v1/services/datastore/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -34,6 +34,7 @@ from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import DatastoreGrpcAsyncIOTransport from .client import DatastoreClient @@ -299,8 +300,7 @@ def sample_lookup(): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -376,8 +376,7 @@ def sample_run_query(): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -500,7 +499,6 @@ async def commit( r"""Commits a transaction, optionally creating, deleting or modifying some entities. - .. code-block:: python from google.cloud import datastore_v1 @@ -735,7 +733,6 @@ async def allocate_ids( r"""Allocates IDs for the given keys, which is useful for referencing an entity before it is inserted. - .. code-block:: python from google.cloud import datastore_v1 @@ -838,7 +835,6 @@ async def reserve_ids( r"""Prevents the supplied keys' IDs from being auto-allocated by Cloud Datastore. - .. 
code-block:: python from google.cloud import datastore_v1 @@ -917,8 +913,7 @@ def sample_reserve_ids(): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), diff --git a/google/cloud/datastore_v1/services/datastore/client.py b/google/cloud/datastore_v1/services/datastore/client.py index de663367..5b012a2f 100644 --- a/google/cloud/datastore_v1/services/datastore/client.py +++ b/google/cloud/datastore_v1/services/datastore/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib @@ -37,6 +37,7 @@ from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO from .transports.grpc import DatastoreGrpcTransport from .transports.grpc_asyncio import DatastoreGrpcAsyncIOTransport @@ -689,7 +690,6 @@ def commit( r"""Commits a transaction, optionally creating, deleting or modifying some entities. - .. code-block:: python from google.cloud import datastore_v1 @@ -924,7 +924,6 @@ def allocate_ids( r"""Allocates IDs for the given keys, which is useful for referencing an entity before it is inserted. - .. code-block:: python from google.cloud import datastore_v1 @@ -1027,7 +1026,6 @@ def reserve_ids( r"""Prevents the supplied keys' IDs from being auto-allocated by Cloud Datastore. - .. code-block:: python from google.cloud import datastore_v1 diff --git a/google/cloud/datastore_v1/services/datastore/transports/base.py b/google/cloud/datastore_v1/services/datastore/transports/base.py index b50c0fca..0bf916c8 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/base.py +++ b/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -83,6 +83,7 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
if ":" not in host: host += ":443" @@ -130,8 +131,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -145,8 +145,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -180,8 +179,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -265,5 +263,9 @@ def reserve_ids( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("DatastoreTransport",) diff --git a/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/google/cloud/datastore_v1/services/datastore/transports/grpc.py index d8a2f001..16938b68 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ b/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -419,5 +419,9 @@ def reserve_ids( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("DatastoreGrpcTransport",) diff --git a/google/cloud/datastore_v1/types/datastore.py b/google/cloud/datastore_v1/types/datastore.py index d5d974c2..f4907298 100644 --- a/google/cloud/datastore_v1/types/datastore.py +++ b/google/cloud/datastore_v1/types/datastore.py @@ -17,6 +17,7 @@ from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query as gd_query +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -92,6 +93,9 @@ class LookupResponse(proto.Message): resource constraints. The order of results in this field is undefined and has no relation to the order of the keys in the input. + read_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which these entities were read or + found missing. """ found = proto.RepeatedField( @@ -109,6 +113,11 @@ class LookupResponse(proto.Message): number=3, message=entity.Key, ) + read_time = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) class RunQueryRequest(proto.Message): @@ -341,6 +350,9 @@ class CommitResponse(proto.Message): index_updates (int): The number of index entries updated during the commit, or zero if none were updated. + commit_time (google.protobuf.timestamp_pb2.Timestamp): + The transaction commit timestamp. Not set for + non-transactional commits. """ mutation_results = proto.RepeatedField( @@ -352,6 +364,11 @@ class CommitResponse(proto.Message): proto.INT32, number=4, ) + commit_time = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) class AllocateIdsRequest(proto.Message): @@ -477,6 +494,13 @@ class Mutation(proto.Message): current version on the server, the mutation conflicts. + This field is a member of `oneof`_ ``conflict_detection_strategy``. + update_time (google.protobuf.timestamp_pb2.Timestamp): + The update time of the entity that this + mutation is being applied to. If this does not + match the current update time on the server, the + mutation conflicts. + This field is a member of `oneof`_ ``conflict_detection_strategy``. 
""" @@ -509,6 +533,12 @@ class Mutation(proto.Message): number=8, oneof="conflict_detection_strategy", ) + update_time = proto.Field( + proto.MESSAGE, + number=11, + oneof="conflict_detection_strategy", + message=timestamp_pb2.Timestamp, + ) class MutationResult(proto.Message): @@ -527,6 +557,13 @@ class MutationResult(proto.Message): greater than the version of any previous entity and less than the version of any possible future entity. + update_time (google.protobuf.timestamp_pb2.Timestamp): + The update time of the entity on the server + after processing the mutation. If the mutation + doesn't change anything on the server, then the + timestamp will be the update timestamp of the + current entity. This field will not be set after + a 'delete'. conflict_detected (bool): Whether a conflict was detected for this mutation. Always false when a conflict detection @@ -542,6 +579,11 @@ class MutationResult(proto.Message): proto.INT64, number=4, ) + update_time = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) conflict_detected = proto.Field( proto.BOOL, number=5, @@ -569,6 +611,13 @@ class ReadOptions(proto.Message): transaction identifier is returned by a call to [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + This field is a member of `oneof`_ ``consistency_type``. + read_time (google.protobuf.timestamp_pb2.Timestamp): + Reads entities as they were at the given + time. This may not be older than 270 seconds. + This value is only supported for Cloud Firestore + in Datastore mode. + This field is a member of `oneof`_ ``consistency_type``. """ @@ -589,6 +638,12 @@ class ReadConsistency(proto.Enum): number=2, oneof="consistency_type", ) + read_time = proto.Field( + proto.MESSAGE, + number=4, + oneof="consistency_type", + message=timestamp_pb2.Timestamp, + ) class TransactionOptions(proto.Message): @@ -634,7 +689,19 @@ class ReadWrite(proto.Message): ) class ReadOnly(proto.Message): - r"""Options specific to read-only transactions.""" + r"""Options specific to read-only transactions. + + Attributes: + read_time (google.protobuf.timestamp_pb2.Timestamp): + Reads entities at the given time. + This may not be older than 60 seconds. + """ + + read_time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) read_write = proto.Field( proto.MESSAGE, diff --git a/google/cloud/datastore_v1/types/entity.py b/google/cloud/datastore_v1/types/entity.py index a9371a98..e949a56a 100644 --- a/google/cloud/datastore_v1/types/entity.py +++ b/google/cloud/datastore_v1/types/entity.py @@ -346,7 +346,7 @@ class Entity(proto.Message): example, an entity in ``Value.entity_value`` may have no key). An entity's kind is its key path's last element's kind, or null if it has no key. - properties (Sequence[google.cloud.datastore_v1.types.Entity.PropertiesEntry]): + properties (Mapping[str, google.cloud.datastore_v1.types.Value]): The entity's properties. The map's keys are property names. A property name matching regex ``__.*__`` is reserved. 
A reserved property name is forbidden in certain documented diff --git a/google/cloud/datastore_v1/types/query.py b/google/cloud/datastore_v1/types/query.py index 7c368c57..1179efce 100644 --- a/google/cloud/datastore_v1/types/query.py +++ b/google/cloud/datastore_v1/types/query.py @@ -16,6 +16,7 @@ import proto # type: ignore from google.cloud.datastore_v1.types import entity as gd_entity +from google.protobuf import timestamp_pb2 # type: ignore from google.protobuf import wrappers_pb2 # type: ignore @@ -56,6 +57,12 @@ class EntityResult(proto.Message): entities in ``LookupResponse``, this is the version of the snapshot that was used to look up the entity, and it is always set except for eventually consistent reads. + update_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the entity was last changed. This field is + set for + [``FULL``][google.datastore.v1.EntityResult.ResultType.FULL] + entity results. If this entity is missing, this field will + not be set. cursor (bytes): A cursor that points to the position after the result entity. Set only when the ``EntityResult`` is part of a @@ -84,6 +91,11 @@ class ResultType(proto.Enum): proto.INT64, number=4, ) + update_time = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) cursor = proto.Field( proto.BYTES, number=3, @@ -368,7 +380,7 @@ class GqlQuery(proto.Message): and instead must bind all values. For example, ``SELECT * FROM Kind WHERE a = 'string literal'`` is not allowed, while ``SELECT * FROM Kind WHERE a = @value`` is. - named_bindings (Sequence[google.cloud.datastore_v1.types.GqlQuery.NamedBindingsEntry]): + named_bindings (Mapping[str, google.cloud.datastore_v1.types.GqlQueryParameter]): For each non-reserved named binding site in the query string, there must be a named parameter with that name, but not necessarily the inverse. @@ -473,6 +485,17 @@ class QueryResultBatch(proto.Message): Each batch's snapshot version is valid for all preceding batches. The value will be zero for eventually consistent queries. + read_time (google.protobuf.timestamp_pb2.Timestamp): + Read timestamp this batch was returned from. This applies to + the range of results from the query's ``start_cursor`` (or + the beginning of the query if no cursor was given) to this + batch's ``end_cursor`` (not the query's ``end_cursor``). + + In a single transaction, subsequent query result batches for + the same query can have a greater timestamp. Each batch's + read timestamp is valid for all preceding batches. This + value will not be set for eventually consistent queries in + Cloud Datastore. 
""" class MoreResultsType(proto.Enum): @@ -514,6 +537,11 @@ class MoreResultsType(proto.Enum): proto.INT64, number=7, ) + read_time = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index 7e678103..fd1fc14c 100644 --- a/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -95,24 +95,24 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - DatastoreAdminClient, - DatastoreAdminAsyncClient, + (DatastoreAdminClient, "grpc"), + (DatastoreAdminAsyncClient, "grpc_asyncio"), ], ) -def test_datastore_admin_client_from_service_account_info(client_class): +def test_datastore_admin_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "datastore.googleapis.com:443" + assert client.transport._host == ("datastore.googleapis.com:443") @pytest.mark.parametrize( @@ -141,27 +141,31 @@ def test_datastore_admin_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - DatastoreAdminClient, - DatastoreAdminAsyncClient, + (DatastoreAdminClient, "grpc"), + (DatastoreAdminAsyncClient, "grpc_asyncio"), ], ) -def test_datastore_admin_client_from_service_account_file(client_class): +def test_datastore_admin_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "datastore.googleapis.com:443" + assert client.transport._host == ("datastore.googleapis.com:443") def test_datastore_admin_client_get_transport_class(): @@ -1535,7 +1539,7 @@ async def test_list_indexes_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1581,7 +1585,9 @@ async def test_list_indexes_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_indexes(request={})).pages: + async for page_ in ( + await client.list_indexes(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert 
page_.raw_page.next_page_token == token @@ -1678,6 +1684,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = DatastoreAdminClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = DatastoreAdminClient( @@ -1730,6 +1749,14 @@ def test_datastore_admin_base_transport(): with pytest.raises(NotImplementedError): transport.operations_client + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_datastore_admin_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -1887,24 +1914,40 @@ def test_datastore_admin_grpc_transport_client_cert_source_for_mtls(transport_cl ) -def test_datastore_admin_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_datastore_admin_host_no_port(transport_name): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datastore.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "datastore.googleapis.com:443" + assert client.transport._host == ("datastore.googleapis.com:443") -def test_datastore_admin_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_datastore_admin_host_with_port(transport_name): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datastore.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "datastore.googleapis.com:8000" + assert client.transport._host == ("datastore.googleapis.com:8000") def test_datastore_admin_grpc_transport_channel(): diff --git a/tests/unit/gapic/datastore_v1/test_datastore.py b/tests/unit/gapic/datastore_v1/test_datastore.py index 445f96fa..4106b217 100644 --- a/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/tests/unit/gapic/datastore_v1/test_datastore.py @@ -85,24 +85,24 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - DatastoreClient, - DatastoreAsyncClient, + (DatastoreClient, "grpc"), + (DatastoreAsyncClient, "grpc_asyncio"), ], ) -def test_datastore_client_from_service_account_info(client_class): +def test_datastore_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "datastore.googleapis.com:443" + assert client.transport._host == ("datastore.googleapis.com:443") @pytest.mark.parametrize( @@ -131,27 +131,31 @@ def test_datastore_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + 
"client_class,transport_name", [ - DatastoreClient, - DatastoreAsyncClient, + (DatastoreClient, "grpc"), + (DatastoreAsyncClient, "grpc_asyncio"), ], ) -def test_datastore_client_from_service_account_file(client_class): +def test_datastore_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "datastore.googleapis.com:443" + assert client.transport._host == ("datastore.googleapis.com:443") def test_datastore_client_get_transport_class(): @@ -2012,6 +2016,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = DatastoreClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = DatastoreClient( @@ -2060,6 +2077,14 @@ def test_datastore_base_transport(): with pytest.raises(NotImplementedError): transport.close() + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_datastore_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -2214,24 +2239,40 @@ def test_datastore_grpc_transport_client_cert_source_for_mtls(transport_class): ) -def test_datastore_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_datastore_host_no_port(transport_name): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datastore.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "datastore.googleapis.com:443" + assert client.transport._host == ("datastore.googleapis.com:443") -def test_datastore_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_datastore_host_with_port(transport_name): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datastore.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "datastore.googleapis.com:8000" + assert client.transport._host == ("datastore.googleapis.com:8000") def test_datastore_grpc_transport_channel(): From a09b8313d25aab5d2cfe319ebec4410411c9fa7a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 13 Apr 2022 20:49:39 -0400 Subject: [PATCH 10/15] chore: use gapic-generator-python 0.65.1 (#302) MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.65.1 PiperOrigin-RevId: 441524537 Source-Link: https://github.com/googleapis/googleapis/commit/2a273915b3f70fe86c9d2a75470a0b83e48d0abf Source-Link: https://github.com/googleapis/googleapis-gen/commit/ab6756a48c89b5bcb9fb73443cb8e55d574f4643 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYWI2NzU2YTQ4Yzg5YjViY2I5ZmI3MzQ0M2NiOGU1NWQ1NzRmNDY0MyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../services/datastore_admin/async_client.py | 6 ++++-- .../services/datastore_admin/transports/base.py | 6 ++++-- .../datastore_v1/services/datastore/async_client.py | 9 ++++++--- .../datastore_v1/services/datastore/transports/base.py | 9 ++++++--- 4 files changed, 20 insertions(+), 10 deletions(-) diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index 122d1fe5..0f6be699 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -838,7 +838,8 @@ def sample_get_index(): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -920,7 +921,8 @@ def sample_list_indexes(): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index 0cf9ac64..618a990c 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -154,7 +154,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -168,7 +169,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), diff --git a/google/cloud/datastore_v1/services/datastore/async_client.py b/google/cloud/datastore_v1/services/datastore/async_client.py index a4c41543..ab4d60cc 100644 --- a/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/google/cloud/datastore_v1/services/datastore/async_client.py @@ -300,7 +300,8 @@ def sample_lookup(): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -376,7 +377,8 @@ def sample_run_query(): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -913,7 +915,8 @@ def sample_reserve_ids(): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - 
core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), diff --git a/google/cloud/datastore_v1/services/datastore/transports/base.py b/google/cloud/datastore_v1/services/datastore/transports/base.py index 0bf916c8..22a4c167 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/base.py +++ b/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -131,7 +131,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -145,7 +146,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -179,7 +181,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), From 75766c88ce14ef615b57a78446b0f0389f96c9d6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 20 Apr 2022 20:49:57 -0400 Subject: [PATCH 11/15] chore(python): add nox session to sort python imports (#303) Source-Link: https://github.com/googleapis/synthtool/commit/1b71c10e20de7ed3f97f692f99a0e3399b67049f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:00c9d764fd1cd56265f12a5ef4b99a0c9e87cf261018099141e2ca5158890416 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- noxfile.py | 27 ++++++++++++++++++++++++--- 2 files changed, 26 insertions(+), 5 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index bc893c97..7c454abf 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:8a5d3f6a2e43ed8293f34e06a2f56931d1e88a2694c3bb11b15df4eb256ad163 -# created: 2022-04-06T10:30:21.687684602Z + digest: sha256:00c9d764fd1cd56265f12a5ef4b99a0c9e87cf261018099141e2ca5158890416 +# created: 2022-04-20T23:42:53.970438194Z diff --git a/noxfile.py b/noxfile.py index 975a93e3..27e2a51e 100644 --- a/noxfile.py +++ b/noxfile.py @@ -25,7 +25,8 @@ import nox BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" @@ -85,7 +86,7 @@ def lint(session): session.run( "black", "--check", - *BLACK_PATHS, + *LINT_PATHS, ) session.run("flake8", "google", "tests") @@ -96,7 +97,27 @@ def blacken(session): session.install(BLACK_VERSION) session.run( "black", - *BLACK_PATHS, + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. 
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, ) From ed2da39770ed4868be31cb26b660f3ddac7f2fa2 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 21 Apr 2022 16:08:20 +0000 Subject: [PATCH 12/15] chore(python): use ubuntu 22.04 in docs image (#305) Source-Link: https://github.com/googleapis/synthtool/commit/f15cc72fb401b4861cedebb10af74afe428fb1f8 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:bc5eed3804aec2f05fad42aacf973821d9500c174015341f721a984a0825b6fd --- .github/.OwlBot.lock.yaml | 4 ++-- .kokoro/docker/docs/Dockerfile | 20 ++++++++++++++++++-- 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 7c454abf..64f82d6b 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:00c9d764fd1cd56265f12a5ef4b99a0c9e87cf261018099141e2ca5158890416 -# created: 2022-04-20T23:42:53.970438194Z + digest: sha256:bc5eed3804aec2f05fad42aacf973821d9500c174015341f721a984a0825b6fd +# created: 2022-04-21T15:43:16.246106921Z diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index 4e1b1fb8..238b87b9 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ubuntu:20.04 +from ubuntu:22.04 ENV DEBIAN_FRONTEND noninteractive @@ -60,8 +60,24 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb +###################### Install python 3.8.11 + +# Download python 3.8.11 +RUN wget https://www.python.org/ftp/python/3.8.11/Python-3.8.11.tgz + +# Extract files +RUN tar -xvf Python-3.8.11.tgz + +# Install python 3.8.11 +RUN ./Python-3.8.11/configure --enable-optimizations +RUN make altinstall + +###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3.8 /tmp/get-pip.py \ + && python3 /tmp/get-pip.py \ && rm /tmp/get-pip.py +# Test pip +RUN python3 -m pip + CMD ["python3.8"] From 219af7a96a61998919f72b771051ec703463d1f5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 5 May 2022 12:12:55 -0400 Subject: [PATCH 13/15] chore: [autoapprove] update readme_gen.py to include autoescape True (#307) Source-Link: https://github.com/googleapis/synthtool/commit/6b4d5a6407d740beb4158b302194a62a4108a8a6 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f792ee1320e03eda2d13a5281a2989f7ed8a9e50b73ef6da97fac7e1e850b149 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- scripts/readme-gen/readme_gen.py | 5 ++++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 64f82d6b..b631901e 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:bc5eed3804aec2f05fad42aacf973821d9500c174015341f721a984a0825b6fd -# created: 2022-04-21T15:43:16.246106921Z + digest: sha256:f792ee1320e03eda2d13a5281a2989f7ed8a9e50b73ef6da97fac7e1e850b149 +# created: 2022-05-05T15:17:27.599381182Z diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py index d309d6e9..91b59676 100644 --- a/scripts/readme-gen/readme_gen.py +++ b/scripts/readme-gen/readme_gen.py @@ -28,7 +28,10 @@ jinja_env = jinja2.Environment( trim_blocks=True, loader=jinja2.FileSystemLoader( - os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates')))) + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) README_TMPL = jinja_env.get_template('README.tmpl.rst') From 2adcdc20ea6e0c734d22dad96b661c737c57acd6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 5 May 2022 23:02:12 +0000 Subject: [PATCH 14/15] chore(python): auto approve template changes (#309) Source-Link: https://github.com/googleapis/synthtool/commit/453a5d9c9a55d1969240a37d36cec626d20a9024 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:81ed5ecdfc7cac5b699ba4537376f3563f6f04122c4ec9e735d3b3dc1d43dd32 --- .github/.OwlBot.lock.yaml | 4 ++-- .github/auto-approve.yml | 3 +++ 2 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 .github/auto-approve.yml diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index b631901e..757c9dca 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f792ee1320e03eda2d13a5281a2989f7ed8a9e50b73ef6da97fac7e1e850b149 -# created: 2022-05-05T15:17:27.599381182Z + digest: sha256:81ed5ecdfc7cac5b699ba4537376f3563f6f04122c4ec9e735d3b3dc1d43dd32 +# created: 2022-05-05T22:08:23.383410683Z diff --git a/.github/auto-approve.yml b/.github/auto-approve.yml new file mode 100644 index 00000000..311ebbb8 --- /dev/null +++ b/.github/auto-approve.yml @@ -0,0 +1,3 @@ +# https://github.com/googleapis/repo-automation-bots/tree/main/packages/auto-approve +processes: + - "OwlBotTemplateChanges" From b9e9cec93b5da9e1f09f6223e7a46b4042316347 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 19 May 2022 08:51:28 -0400 Subject: [PATCH 15/15] chore(main): release 2.6.0 (#301) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- CHANGELOG.md | 12 ++++++++++++ google/cloud/datastore/version.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8064ab65..5c7d5bb1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.6.0](https://github.com/googleapis/python-datastore/compare/v2.5.1...v2.6.0) (2022-05-05) + + +### Features + +* expose new read_time API fields, currently only available in private preview ([8d2bd17](https://github.com/googleapis/python-datastore/commit/8d2bd1788d8dc7da57ab9272b274a29082878ece)) + + +### Documentation + +* fix type in docstring for map fields ([8d2bd17](https://github.com/googleapis/python-datastore/commit/8d2bd1788d8dc7da57ab9272b274a29082878ece)) + ### 
[2.5.1](https://github.com/googleapis/python-datastore/compare/v2.5.0...v2.5.1) (2022-03-05) diff --git a/google/cloud/datastore/version.py b/google/cloud/datastore/version.py index aa1cc6e9..ae34a9fb 100644 --- a/google/cloud/datastore/version.py +++ b/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.5.1" +__version__ = "2.6.0"
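
The read_time fields threaded through these patches (LookupResponse.read_time, ReadOptions.read_time, TransactionOptions.ReadOnly.read_time, QueryResultBatch.read_time, EntityResult.update_time) can be exercised through the generated datastore_v1 surface. A minimal sketch of a point-in-time lookup — assuming access to the private-preview read_time feature noted in the 2.6.0 changelog, and substituting a hypothetical project ID and key:

    import datetime

    from google.cloud import datastore_v1
    from google.protobuf import timestamp_pb2

    client = datastore_v1.DatastoreClient()

    # Build a timestamp a few seconds in the past; per the ReadOptions
    # docstring above, read_time may not be older than 270 seconds.
    read_time = timestamp_pb2.Timestamp()
    read_time.FromDatetime(
        datetime.datetime.utcnow() - datetime.timedelta(seconds=30)
    )

    # "my-project" and the Task key are illustrative placeholders.
    key = datastore_v1.Key(
        partition_id=datastore_v1.PartitionId(project_id="my-project"),
        path=[datastore_v1.Key.PathElement(kind="Task", name="sample-task")],
    )

    response = client.lookup(
        request=datastore_v1.LookupRequest(
            project_id="my-project",
            keys=[key],
            read_options=datastore_v1.ReadOptions(read_time=read_time),
        )
    )

    # New in these protos: the snapshot time the entities were read at.
    print(response.read_time)

The same ReadOptions shape applies to RunQueryRequest, where QueryResultBatch.read_time then reports the snapshot each batch was served from.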
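Patch 10's revert of the retry predicate — from the overly broad core_exceptions.GoogleAPICallError back to DeadlineExceeded and ServiceUnavailable — matters because retrying on any API error would also retry non-transient failures such as InvalidArgument or PermissionDenied. A sketch of the equivalent policy built by hand with google.api_core, mirroring the parameters visible in the transports/base.py hunks (the initial backoff is assumed to be 0.1s; that line falls above the hunk context shown):

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    # Retry only transient errors, matching the defaults restored in
    # these patches: exponential backoff multiplied by 1.3 per attempt,
    # capped at 60s per sleep and a 60s overall deadline.
    default_retry = retries.Retry(
        initial=0.1,  # assumed default; not shown in the hunk context
        maximum=60.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(
            core_exceptions.DeadlineExceeded,
            core_exceptions.ServiceUnavailable,
        ),
        deadline=60.0,
    )

    # A policy like this can be passed per call on any generated method,
    # e.g.: client.lookup(request=request, retry=default_retry)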