From 8c8ee8159e9f1bcb319b4c9c81642f568e7ce167 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 12 Dec 2023 23:18:29 +0100 Subject: [PATCH 01/24] chore(deps): update dependency google-cloud-datastore to v2.19.0 (#508) --- samples/snippets/requirements.txt | 2 +- samples/snippets/schedule-export/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index d4e90e37..5bccacc5 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.18.0 \ No newline at end of file +google-cloud-datastore==2.19.0 \ No newline at end of file diff --git a/samples/snippets/schedule-export/requirements.txt b/samples/snippets/schedule-export/requirements.txt index a84b83a1..b748abdc 100644 --- a/samples/snippets/schedule-export/requirements.txt +++ b/samples/snippets/schedule-export/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.18.0 +google-cloud-datastore==2.19.0 From 945de5a4981822731ee7e1726b66999f0a085d76 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 14 Dec 2023 19:27:11 -0500 Subject: [PATCH 02/24] build: update actions/upload-artifact and actions/download-artifact (#510) Source-Link: https://github.com/googleapis/synthtool/commit/280ddaed417057dfe5b1395731de07b7d09f5058 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:346ab2efb51649c5dde7756cbbdc60dd394852ba83b9bbffc292a63549f33c17 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .github/workflows/unittest.yml | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 40bf9973..9bee2409 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
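The two requirements files in the patch above pin an exact release rather than a version range, so the samples always run against a known Datastore client. A small illustrative check, not part of this repository, of what such a pin guarantees at runtime (the test name is invented):

import importlib.metadata

def test_pinned_datastore_version():
    # The samples pin google-cloud-datastore==2.19.0 exactly, so the
    # installed distribution must report that same version.
    assert importlib.metadata.version("google-cloud-datastore") == "2.19.0"
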
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:230f7fe8a0d2ed81a519cfc15c6bb11c5b46b9fb449b8b1219b3771bcb520ad2 -# created: 2023-12-09T15:16:25.430769578Z + digest: sha256:346ab2efb51649c5dde7756cbbdc60dd394852ba83b9bbffc292a63549f33c17 +# created: 2023-12-14T22:17:57.611773021Z diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index d6ca6562..f4a337c4 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -26,9 +26,9 @@ jobs: run: | nox -s unit-${{ matrix.python }} - name: Upload coverage results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: - name: coverage-artifacts + name: coverage-artifact-${{ matrix.python }} path: .coverage-${{ matrix.python }} cover: @@ -47,11 +47,11 @@ jobs: python -m pip install --upgrade setuptools pip wheel python -m pip install coverage - name: Download coverage results - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: - name: coverage-artifacts path: .coverage-results/ - name: Report coverage results run: | - coverage combine .coverage-results/.coverage* + find .coverage-results -type f -name '*.zip' -exec unzip {} \; + coverage combine .coverage-results/**/.coverage* coverage report --show-missing --fail-under=100 From b399c4c216f4f3a5226ccdf7399a6e29c7cb348a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 11 Jan 2024 12:38:55 -0800 Subject: [PATCH 03/24] feat: add new types QueryMode, QueryPlan, ResultSetStats (#512) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add new types QueryMode, QueryPlan, ResultSetStats feat: add QueryMode field to RunQueryRequest feat: add ResultSetStats field to RunQueryResponse feat: add QueryMode field to RunAggregationQueryRequest feat: add ResultSetStats field to RunAggregationQueryResponse PiperOrigin-RevId: 595774772 Source-Link: https://github.com/googleapis/googleapis/commit/03e7ed44ac499ee71baa2770e78045a3a85f30f5 Source-Link: https://github.com/googleapis/googleapis-gen/commit/dc63e0dea8423c230d5fb0937acb3c98719c9395 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZGM2M2UwZGVhODQyM2MyMzBkNWZiMDkzN2FjYjNjOTg3MTljOTM5NSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- google/cloud/datastore_v1/__init__.py | 6 + .../services/datastore/async_client.py | 1 + .../datastore_v1/services/datastore/client.py | 1 + google/cloud/datastore_v1/types/__init__.py | 8 ++ google/cloud/datastore_v1/types/datastore.py | 43 ++++++++ .../cloud/datastore_v1/types/query_profile.py | 104 ++++++++++++++++++ scripts/fixup_datastore_v1_keywords.py | 4 +- .../unit/gapic/datastore_v1/test_datastore.py | 1 + 8 files changed, 166 insertions(+), 2 deletions(-) create mode 100644 google/cloud/datastore_v1/types/query_profile.py diff --git a/google/cloud/datastore_v1/__init__.py b/google/cloud/datastore_v1/__init__.py index b494905b..c72ebcf6 100644 --- a/google/cloud/datastore_v1/__init__.py +++ b/google/cloud/datastore_v1/__init__.py @@ -61,6 +61,9 @@ from .types.query import PropertyReference from .types.query import Query from .types.query import QueryResultBatch +from .types.query_profile import QueryPlan +from .types.query_profile import ResultSetStats +from .types.query_profile import QueryMode __all__ = ( "DatastoreAsyncClient", @@ -93,10 +96,13 @@ 
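Back in the unittest workflow updated in the previous patch (#510): actions/upload-artifact@v4 no longer lets multiple jobs write to a single artifact name, so each matrix job uploads coverage-artifact-${{ matrix.python }}, and the download step restores each artifact into its own subdirectory of .coverage-results/, which is why the combine command now unzips first and globs one directory deeper. A sketch of the equivalent merge using the coverage package's Python API directly (paths mirror the workflow; this is illustrative, not part of the repository):

from pathlib import Path
from coverage import Coverage

# Collect the per-interpreter data files restored by download-artifact@v4,
# one subdirectory per uploaded artifact.
data_files = [str(p) for p in Path(".coverage-results").glob("**/.coverage*")]

cov = Coverage()
cov.combine(data_files)        # merge into a single .coverage data file
cov.report(show_missing=True)  # same as `coverage report --show-missing`
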
"PropertyOrder", "PropertyReference", "Query", + "QueryMode", + "QueryPlan", "QueryResultBatch", "ReadOptions", "ReserveIdsRequest", "ReserveIdsResponse", + "ResultSetStats", "RollbackRequest", "RollbackResponse", "RunAggregationQueryRequest", diff --git a/google/cloud/datastore_v1/services/datastore/async_client.py b/google/cloud/datastore_v1/services/datastore/async_client.py index c49b4b07..b4968920 100644 --- a/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/google/cloud/datastore_v1/services/datastore/async_client.py @@ -46,6 +46,7 @@ from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query +from google.cloud.datastore_v1.types import query_profile from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO diff --git a/google/cloud/datastore_v1/services/datastore/client.py b/google/cloud/datastore_v1/services/datastore/client.py index 58c8ad22..b070d01c 100644 --- a/google/cloud/datastore_v1/services/datastore/client.py +++ b/google/cloud/datastore_v1/services/datastore/client.py @@ -50,6 +50,7 @@ from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query +from google.cloud.datastore_v1.types import query_profile from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO diff --git a/google/cloud/datastore_v1/types/__init__.py b/google/cloud/datastore_v1/types/__init__.py index b6ff2a44..84db9bf8 100644 --- a/google/cloud/datastore_v1/types/__init__.py +++ b/google/cloud/datastore_v1/types/__init__.py @@ -61,6 +61,11 @@ Query, QueryResultBatch, ) +from .query_profile import ( + QueryPlan, + ResultSetStats, + QueryMode, +) __all__ = ( "AggregationResult", @@ -103,4 +108,7 @@ "PropertyReference", "Query", "QueryResultBatch", + "QueryPlan", + "ResultSetStats", + "QueryMode", ) diff --git a/google/cloud/datastore_v1/types/datastore.py b/google/cloud/datastore_v1/types/datastore.py index 6c768904..07ccfba8 100644 --- a/google/cloud/datastore_v1/types/datastore.py +++ b/google/cloud/datastore_v1/types/datastore.py @@ -22,6 +22,7 @@ from google.cloud.datastore_v1.types import aggregation_result from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query as gd_query +from google.cloud.datastore_v1.types import query_profile from google.protobuf import timestamp_pb2 # type: ignore @@ -185,6 +186,11 @@ class RunQueryRequest(proto.Message): non-aggregation query. This field is a member of `oneof`_ ``query_type``. + mode (google.cloud.datastore_v1.types.QueryMode): + Optional. The mode in which the query request is processed. + This field is optional, and when not provided, it defaults + to ``NORMAL`` mode where no additional statistics will be + returned with the query results. 
""" project_id: str = proto.Field( @@ -217,6 +223,11 @@ class RunQueryRequest(proto.Message): oneof="query_type", message=gd_query.GqlQuery, ) + mode: query_profile.QueryMode = proto.Field( + proto.ENUM, + number=11, + enum=query_profile.QueryMode, + ) class RunQueryResponse(proto.Message): @@ -237,6 +248,12 @@ class RunQueryResponse(proto.Message): [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] was set in [RunQueryRequest.read_options][google.datastore.v1.RunQueryRequest.read_options]. + stats (google.cloud.datastore_v1.types.ResultSetStats): + Query plan and execution statistics. Note that the returned + stats are subject to change as Firestore evolves. + + This is only present when the request specifies a mode other + than ``NORMAL``. """ batch: gd_query.QueryResultBatch = proto.Field( @@ -253,6 +270,11 @@ class RunQueryResponse(proto.Message): proto.BYTES, number=5, ) + stats: query_profile.ResultSetStats = proto.Field( + proto.MESSAGE, + number=6, + message=query_profile.ResultSetStats, + ) class RunAggregationQueryRequest(proto.Message): @@ -292,6 +314,11 @@ class RunAggregationQueryRequest(proto.Message): aggregation query. This field is a member of `oneof`_ ``query_type``. + mode (google.cloud.datastore_v1.types.QueryMode): + Optional. The mode in which the query request is processed. + This field is optional, and when not provided, it defaults + to ``NORMAL`` mode where no additional statistics will be + returned with the query results. """ project_id: str = proto.Field( @@ -324,6 +351,11 @@ class RunAggregationQueryRequest(proto.Message): oneof="query_type", message=gd_query.GqlQuery, ) + mode: query_profile.QueryMode = proto.Field( + proto.ENUM, + number=10, + enum=query_profile.QueryMode, + ) class RunAggregationQueryResponse(proto.Message): @@ -345,6 +377,12 @@ class RunAggregationQueryResponse(proto.Message): [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] was set in [RunAggregationQueryRequest.read_options][google.datastore.v1.RunAggregationQueryRequest.read_options]. + stats (google.cloud.datastore_v1.types.ResultSetStats): + Query plan and execution statistics. Note that the returned + stats are subject to change as Firestore evolves. + + This is only present when the request specifies a mode other + than ``NORMAL``. """ batch: aggregation_result.AggregationResultBatch = proto.Field( @@ -361,6 +399,11 @@ class RunAggregationQueryResponse(proto.Message): proto.BYTES, number=5, ) + stats: query_profile.ResultSetStats = proto.Field( + proto.MESSAGE, + number=6, + message=query_profile.ResultSetStats, + ) class BeginTransactionRequest(proto.Message): diff --git a/google/cloud/datastore_v1/types/query_profile.py b/google/cloud/datastore_v1/types/query_profile.py new file mode 100644 index 00000000..3258a0f7 --- /dev/null +++ b/google/cloud/datastore_v1/types/query_profile.py @@ -0,0 +1,104 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import struct_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.datastore.v1", + manifest={ + "QueryMode", + "QueryPlan", + "ResultSetStats", + }, +) + + +class QueryMode(proto.Enum): + r"""The mode in which the query request must be processed. + + Values: + NORMAL (0): + The default mode. Only the query results are + returned. + PLAN (1): + This mode returns only the query plan, + without any results or execution statistics + information. + PROFILE (2): + This mode returns both the query plan and the + execution statistics along with the results. + """ + NORMAL = 0 + PLAN = 1 + PROFILE = 2 + + +class QueryPlan(proto.Message): + r"""Plan for the query. + + Attributes: + plan_info (google.protobuf.struct_pb2.Struct): + Planning phase information for the query. It will include: + + { "indexes_used": [ {"query_scope": "Collection", + "properties": "(foo ASC, **name** ASC)"}, {"query_scope": + "Collection", "properties": "(bar ASC, **name** ASC)"} ] } + """ + + plan_info: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=1, + message=struct_pb2.Struct, + ) + + +class ResultSetStats(proto.Message): + r"""Planning and execution statistics for the query. + + Attributes: + query_plan (google.cloud.datastore_v1.types.QueryPlan): + Plan for the query. + query_stats (google.protobuf.struct_pb2.Struct): + Aggregated statistics from the execution of the query. + + This will only be present when the request specifies + ``PROFILE`` mode. For example, a query will return the + statistics including: + + { "results_returned": "20", "documents_scanned": "20", + "indexes_entries_scanned": "10050", "total_execution_time": + "100.7 msecs" } + """ + + query_plan: "QueryPlan" = proto.Field( + proto.MESSAGE, + number=1, + message="QueryPlan", + ) + query_stats: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/scripts/fixup_datastore_v1_keywords.py b/scripts/fixup_datastore_v1_keywords.py index 4a8be1c9..eb323e40 100644 --- a/scripts/fixup_datastore_v1_keywords.py +++ b/scripts/fixup_datastore_v1_keywords.py @@ -45,8 +45,8 @@ class datastoreCallTransformer(cst.CSTTransformer): 'lookup': ('project_id', 'keys', 'database_id', 'read_options', ), 'reserve_ids': ('project_id', 'keys', 'database_id', ), 'rollback': ('project_id', 'transaction', 'database_id', ), - 'run_aggregation_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'aggregation_query', 'gql_query', ), - 'run_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'query', 'gql_query', ), + 'run_aggregation_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'aggregation_query', 'gql_query', 'mode', ), + 'run_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'query', 'gql_query', 'mode', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/tests/unit/gapic/datastore_v1/test_datastore.py b/tests/unit/gapic/datastore_v1/test_datastore.py index 52d8de52..8603a840 100644 --- a/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/tests/unit/gapic/datastore_v1/test_datastore.py @@ -51,6 +51,7 @@ from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query +from 
google.cloud.datastore_v1.types import query_profile from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import struct_pb2 # type: ignore From 4532da57d7d9e30239df376f643977e9ce8cddba Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 22 Jan 2024 20:40:58 +0100 Subject: [PATCH 04/24] chore(deps): update dependency pytest to v7.4.4 (#511) --- samples/snippets/requirements-test.txt | 2 +- samples/snippets/schedule-export/requirements-test.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt index dfba3f35..80d3b1a9 100644 --- a/samples/snippets/requirements-test.txt +++ b/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ backoff===1.11.1; python_version < "3.7" backoff==2.2.1; python_version >= "3.7" -pytest==7.4.3 +pytest==7.4.4 flaky==3.7.0 diff --git a/samples/snippets/schedule-export/requirements-test.txt b/samples/snippets/schedule-export/requirements-test.txt index f16ee69a..fa427e19 100644 --- a/samples/snippets/schedule-export/requirements-test.txt +++ b/samples/snippets/schedule-export/requirements-test.txt @@ -1 +1 @@ -pytest==7.4.3 \ No newline at end of file +pytest==7.4.4 \ No newline at end of file From b1853553b42799dfda7d40958a750af795715e47 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 22 Jan 2024 16:17:19 -0500 Subject: [PATCH 05/24] build(python): fix `docs` and `docfx` builds (#515) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * build(python): fix `docs` and `docfx` builds Source-Link: https://github.com/googleapis/synthtool/commit/fac8444edd5f5526e804c306b766a271772a3e2f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:5ea6d0ab82c956b50962f91d94e206d3921537ae5fe1549ec5326381d8905cfa * exclude types-protobuf==4.24.0.20240106 release * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 6 +++--- .kokoro/requirements.txt | 6 +++--- noxfile.py | 28 ++++++++++++++++++++++++++-- owlbot.py | 4 +++- 4 files changed, 35 insertions(+), 9 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 9bee2409..d8a1bbca 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,5 +13,5 @@ # limitations under the License. 
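QueryPlan.plan_info and ResultSetStats.query_stats in the query_profile module above are both google.protobuf Struct values. A small hedged helper showing one common way to turn them into plain dictionaries, via the standard protobuf json_format module (the keys shown are the ones quoted in the docstrings above):

from google.protobuf import struct_pb2
from google.protobuf.json_format import MessageToDict

def struct_to_dict(value: struct_pb2.Struct) -> dict:
    # Struct is a map<string, Value>; MessageToDict unwraps it, including
    # any nested lists and structs, into plain Python types.
    return MessageToDict(value)

# e.g. struct_to_dict(response.stats.query_stats).get("results_returned")
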
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:346ab2efb51649c5dde7756cbbdc60dd394852ba83b9bbffc292a63549f33c17 -# created: 2023-12-14T22:17:57.611773021Z + digest: sha256:5ea6d0ab82c956b50962f91d94e206d3921537ae5fe1549ec5326381d8905cfa +# created: 2024-01-15T16:32:08.142785673Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index e5c1ffca..bb3d6ca3 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -263,9 +263,9 @@ jeepney==0.8.0 \ # via # keyring # secretstorage -jinja2==3.1.2 \ - --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ - --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 +jinja2==3.1.3 \ + --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ + --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 # via gcp-releasetool keyring==24.2.0 \ --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ diff --git a/noxfile.py b/noxfile.py index 2f7a6fca..e4e112d5 100644 --- a/noxfile.py +++ b/noxfile.py @@ -134,8 +134,14 @@ def format(session): def mypy(session): """Verify type hints are mypy compatible.""" session.install("-e", ".") + # Exclude types-protobuf==4.24.0.20240106 + # See https://github.com/python/typeshed/issues/11254 session.install( - "mypy", "types-setuptools", "types-mock", "types-protobuf", "types-requests" + "mypy", + "types-setuptools", + "types-mock", + "types-protobuf!=4.24.0.20240106", + "types-requests", ) session.run("mypy", "-p", "google") @@ -304,7 +310,16 @@ def docs(session): session.install("-e", ".") session.install( - "sphinx==4.0.1", + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", "alabaster", "recommonmark", ) @@ -341,6 +356,15 @@ def docfx(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", diff --git a/owlbot.py b/owlbot.py index 77768392..82565aa3 100644 --- a/owlbot.py +++ b/owlbot.py @@ -280,8 +280,10 @@ def lint_setup_py\(session\): def mypy(session): """Verify type hints are mypy compatible.""" session.install("-e", ".") + # Exclude types-protobuf==4.24.0.20240106 + # See https://github.com/python/typeshed/issues/11254 session.install( - "mypy", "types-setuptools", "types-mock", "types-protobuf", "types-requests" + "mypy", "types-setuptools", "types-mock", "types-protobuf!=4.24.0.20240106", "types-requests" ) session.run("mypy", "-p", "google") From 5270f653c227be4229c38450e859e7333ebd86aa Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 24 Jan 2024 04:00:56 -0800 Subject: [PATCH 06/24] chore: Update CODEOWNERS (#516) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update CODEOWNERS * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .github/CODEOWNERS | 8 ++++---- .repo-metadata.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index eb5a61d0..64750824 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -5,8 +5,8 @@ # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax # Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. -# @googleapis/yoshi-python @googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk are the default owners for changes in this repo -* @googleapis/yoshi-python @googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk +# @googleapis/yoshi-python @googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk @googleapis/api-firestore-partners are the default owners for changes in this repo +* @googleapis/yoshi-python @googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk @googleapis/api-firestore-partners -# @googleapis/python-samples-reviewers @googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk are the default owners for samples changes -/samples/ @googleapis/python-samples-reviewers @googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk +# @googleapis/python-samples-reviewers @googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk @googleapis/api-firestore-partners are the default owners for samples changes +/samples/ @googleapis/python-samples-reviewers @googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk @googleapis/api-firestore-partners diff --git a/.repo-metadata.json b/.repo-metadata.json index e6645432..d7b18d4a 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -11,7 +11,7 @@ "distribution_name": "google-cloud-datastore", "api_id": "datastore.googleapis.com", "default_version": "v1", - "codeowner_team": "@googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk", + "codeowner_team": "@googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk @googleapis/api-firestore-partners", "api_shortname": "datastore", "api_description": "is a fully managed, schemaless database for\nstoring non-relational data. 
Cloud Datastore automatically scales with\nyour users and supports ACID transactions, high availability of reads and\nwrites, strong consistency for reads and ancestor queries, and eventual\nconsistency for all other queries." } From fab2f42d1dd3383253fde5cc55ca35b747de7bb8 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 25 Jan 2024 16:01:00 -0500 Subject: [PATCH 07/24] chore(revert): revert add new types QueryMode, QueryPlan, ResultSetStats --- google/cloud/datastore_v1/__init__.py | 6 - .../services/datastore/async_client.py | 1 - .../datastore_v1/services/datastore/client.py | 1 - google/cloud/datastore_v1/types/__init__.py | 8 -- google/cloud/datastore_v1/types/datastore.py | 43 -------- .../cloud/datastore_v1/types/query_profile.py | 104 ------------------ scripts/fixup_datastore_v1_keywords.py | 4 +- .../unit/gapic/datastore_v1/test_datastore.py | 1 - 8 files changed, 2 insertions(+), 166 deletions(-) delete mode 100644 google/cloud/datastore_v1/types/query_profile.py diff --git a/google/cloud/datastore_v1/__init__.py b/google/cloud/datastore_v1/__init__.py index c72ebcf6..b494905b 100644 --- a/google/cloud/datastore_v1/__init__.py +++ b/google/cloud/datastore_v1/__init__.py @@ -61,9 +61,6 @@ from .types.query import PropertyReference from .types.query import Query from .types.query import QueryResultBatch -from .types.query_profile import QueryPlan -from .types.query_profile import ResultSetStats -from .types.query_profile import QueryMode __all__ = ( "DatastoreAsyncClient", @@ -96,13 +93,10 @@ "PropertyOrder", "PropertyReference", "Query", - "QueryMode", - "QueryPlan", "QueryResultBatch", "ReadOptions", "ReserveIdsRequest", "ReserveIdsResponse", - "ResultSetStats", "RollbackRequest", "RollbackResponse", "RunAggregationQueryRequest", diff --git a/google/cloud/datastore_v1/services/datastore/async_client.py b/google/cloud/datastore_v1/services/datastore/async_client.py index b4968920..c49b4b07 100644 --- a/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/google/cloud/datastore_v1/services/datastore/async_client.py @@ -46,7 +46,6 @@ from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query -from google.cloud.datastore_v1.types import query_profile from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO diff --git a/google/cloud/datastore_v1/services/datastore/client.py b/google/cloud/datastore_v1/services/datastore/client.py index b070d01c..58c8ad22 100644 --- a/google/cloud/datastore_v1/services/datastore/client.py +++ b/google/cloud/datastore_v1/services/datastore/client.py @@ -50,7 +50,6 @@ from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query -from google.cloud.datastore_v1.types import query_profile from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO diff --git a/google/cloud/datastore_v1/types/__init__.py b/google/cloud/datastore_v1/types/__init__.py index 84db9bf8..b6ff2a44 100644 --- a/google/cloud/datastore_v1/types/__init__.py +++ b/google/cloud/datastore_v1/types/__init__.py @@ -61,11 +61,6 @@ Query, QueryResultBatch, ) -from .query_profile import ( - QueryPlan, - ResultSetStats, - 
QueryMode, -) __all__ = ( "AggregationResult", @@ -108,7 +103,4 @@ "PropertyReference", "Query", "QueryResultBatch", - "QueryPlan", - "ResultSetStats", - "QueryMode", ) diff --git a/google/cloud/datastore_v1/types/datastore.py b/google/cloud/datastore_v1/types/datastore.py index 07ccfba8..6c768904 100644 --- a/google/cloud/datastore_v1/types/datastore.py +++ b/google/cloud/datastore_v1/types/datastore.py @@ -22,7 +22,6 @@ from google.cloud.datastore_v1.types import aggregation_result from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query as gd_query -from google.cloud.datastore_v1.types import query_profile from google.protobuf import timestamp_pb2 # type: ignore @@ -186,11 +185,6 @@ class RunQueryRequest(proto.Message): non-aggregation query. This field is a member of `oneof`_ ``query_type``. - mode (google.cloud.datastore_v1.types.QueryMode): - Optional. The mode in which the query request is processed. - This field is optional, and when not provided, it defaults - to ``NORMAL`` mode where no additional statistics will be - returned with the query results. """ project_id: str = proto.Field( @@ -223,11 +217,6 @@ class RunQueryRequest(proto.Message): oneof="query_type", message=gd_query.GqlQuery, ) - mode: query_profile.QueryMode = proto.Field( - proto.ENUM, - number=11, - enum=query_profile.QueryMode, - ) class RunQueryResponse(proto.Message): @@ -248,12 +237,6 @@ class RunQueryResponse(proto.Message): [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] was set in [RunQueryRequest.read_options][google.datastore.v1.RunQueryRequest.read_options]. - stats (google.cloud.datastore_v1.types.ResultSetStats): - Query plan and execution statistics. Note that the returned - stats are subject to change as Firestore evolves. - - This is only present when the request specifies a mode other - than ``NORMAL``. """ batch: gd_query.QueryResultBatch = proto.Field( @@ -270,11 +253,6 @@ class RunQueryResponse(proto.Message): proto.BYTES, number=5, ) - stats: query_profile.ResultSetStats = proto.Field( - proto.MESSAGE, - number=6, - message=query_profile.ResultSetStats, - ) class RunAggregationQueryRequest(proto.Message): @@ -314,11 +292,6 @@ class RunAggregationQueryRequest(proto.Message): aggregation query. This field is a member of `oneof`_ ``query_type``. - mode (google.cloud.datastore_v1.types.QueryMode): - Optional. The mode in which the query request is processed. - This field is optional, and when not provided, it defaults - to ``NORMAL`` mode where no additional statistics will be - returned with the query results. """ project_id: str = proto.Field( @@ -351,11 +324,6 @@ class RunAggregationQueryRequest(proto.Message): oneof="query_type", message=gd_query.GqlQuery, ) - mode: query_profile.QueryMode = proto.Field( - proto.ENUM, - number=10, - enum=query_profile.QueryMode, - ) class RunAggregationQueryResponse(proto.Message): @@ -377,12 +345,6 @@ class RunAggregationQueryResponse(proto.Message): [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] was set in [RunAggregationQueryRequest.read_options][google.datastore.v1.RunAggregationQueryRequest.read_options]. - stats (google.cloud.datastore_v1.types.ResultSetStats): - Query plan and execution statistics. Note that the returned - stats are subject to change as Firestore evolves. - - This is only present when the request specifies a mode other - than ``NORMAL``. 
""" batch: aggregation_result.AggregationResultBatch = proto.Field( @@ -399,11 +361,6 @@ class RunAggregationQueryResponse(proto.Message): proto.BYTES, number=5, ) - stats: query_profile.ResultSetStats = proto.Field( - proto.MESSAGE, - number=6, - message=query_profile.ResultSetStats, - ) class BeginTransactionRequest(proto.Message): diff --git a/google/cloud/datastore_v1/types/query_profile.py b/google/cloud/datastore_v1/types/query_profile.py deleted file mode 100644 index 3258a0f7..00000000 --- a/google/cloud/datastore_v1/types/query_profile.py +++ /dev/null @@ -1,104 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import struct_pb2 # type: ignore - - -__protobuf__ = proto.module( - package="google.datastore.v1", - manifest={ - "QueryMode", - "QueryPlan", - "ResultSetStats", - }, -) - - -class QueryMode(proto.Enum): - r"""The mode in which the query request must be processed. - - Values: - NORMAL (0): - The default mode. Only the query results are - returned. - PLAN (1): - This mode returns only the query plan, - without any results or execution statistics - information. - PROFILE (2): - This mode returns both the query plan and the - execution statistics along with the results. - """ - NORMAL = 0 - PLAN = 1 - PROFILE = 2 - - -class QueryPlan(proto.Message): - r"""Plan for the query. - - Attributes: - plan_info (google.protobuf.struct_pb2.Struct): - Planning phase information for the query. It will include: - - { "indexes_used": [ {"query_scope": "Collection", - "properties": "(foo ASC, **name** ASC)"}, {"query_scope": - "Collection", "properties": "(bar ASC, **name** ASC)"} ] } - """ - - plan_info: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=1, - message=struct_pb2.Struct, - ) - - -class ResultSetStats(proto.Message): - r"""Planning and execution statistics for the query. - - Attributes: - query_plan (google.cloud.datastore_v1.types.QueryPlan): - Plan for the query. - query_stats (google.protobuf.struct_pb2.Struct): - Aggregated statistics from the execution of the query. - - This will only be present when the request specifies - ``PROFILE`` mode. 
For example, a query will return the - statistics including: - - { "results_returned": "20", "documents_scanned": "20", - "indexes_entries_scanned": "10050", "total_execution_time": - "100.7 msecs" } - """ - - query_plan: "QueryPlan" = proto.Field( - proto.MESSAGE, - number=1, - message="QueryPlan", - ) - query_stats: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=2, - message=struct_pb2.Struct, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/scripts/fixup_datastore_v1_keywords.py b/scripts/fixup_datastore_v1_keywords.py index eb323e40..4a8be1c9 100644 --- a/scripts/fixup_datastore_v1_keywords.py +++ b/scripts/fixup_datastore_v1_keywords.py @@ -45,8 +45,8 @@ class datastoreCallTransformer(cst.CSTTransformer): 'lookup': ('project_id', 'keys', 'database_id', 'read_options', ), 'reserve_ids': ('project_id', 'keys', 'database_id', ), 'rollback': ('project_id', 'transaction', 'database_id', ), - 'run_aggregation_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'aggregation_query', 'gql_query', 'mode', ), - 'run_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'query', 'gql_query', 'mode', ), + 'run_aggregation_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'aggregation_query', 'gql_query', ), + 'run_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'query', 'gql_query', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/tests/unit/gapic/datastore_v1/test_datastore.py b/tests/unit/gapic/datastore_v1/test_datastore.py index 8603a840..52d8de52 100644 --- a/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/tests/unit/gapic/datastore_v1/test_datastore.py @@ -51,7 +51,6 @@ from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query -from google.cloud.datastore_v1.types import query_profile from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import struct_pb2 # type: ignore From 28d32cb3e9d77d4d10c3abf9ebdd1de23c2d6ae5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 24 Feb 2024 08:17:00 -0500 Subject: [PATCH 08/24] build(deps): bump cryptography from 41.0.6 to 42.0.0 in /synthtool/gcp/templates/python_library/.kokoro (#523) Source-Link: https://github.com/googleapis/synthtool/commit/e13b22b1f660c80e4c3e735a9177d2f16c4b8bdc Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:97b671488ad548ef783a452a9e1276ac10f144d5ae56d98cc4bf77ba504082b4 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 +-- .kokoro/requirements.txt | 57 ++++++++++++++++++++++----------------- 2 files changed, 35 insertions(+), 26 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index d8a1bbca..2aefd0e9 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
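The pinned hashes in .kokoro/requirements.txt above are what pip's hash-checking mode verifies: each downloaded file's SHA-256 digest must match one of the listed values, or the install fails. A minimal sketch of that check (the archive filename is hypothetical; the digest is the first one pinned for cryptography 42.0.0 above):

import hashlib

def sha256_of(path: str) -> str:
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(8192), b""):
            digest.update(chunk)
    return digest.hexdigest()

expected = "0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b"
assert sha256_of("cryptography-42.0.0.tar.gz") == expected  # illustrative filename
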
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5ea6d0ab82c956b50962f91d94e206d3921537ae5fe1549ec5326381d8905cfa -# created: 2024-01-15T16:32:08.142785673Z + digest: sha256:97b671488ad548ef783a452a9e1276ac10f144d5ae56d98cc4bf77ba504082b4 +# created: 2024-02-06T03:20:16.660474034Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index bb3d6ca3..8c11c9f3 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -93,30 +93,39 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==41.0.6 \ - --hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \ - --hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \ - --hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \ - --hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \ - --hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \ - --hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \ - --hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \ - --hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \ - --hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \ - --hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \ - --hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \ - --hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \ - --hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \ - --hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \ - --hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \ - --hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \ - --hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \ - --hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \ - --hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \ - --hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \ - --hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \ - --hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \ - --hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae +cryptography==42.0.0 \ + --hash=sha256:0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b \ + --hash=sha256:146e971e92a6dd042214b537a726c9750496128453146ab0ee8971a0299dc9bd \ + --hash=sha256:14e4b909373bc5bf1095311fa0f7fcabf2d1a160ca13f1e9e467be1ac4cbdf94 \ + --hash=sha256:206aaf42e031b93f86ad60f9f5d9da1b09164f25488238ac1dc488334eb5e221 \ + --hash=sha256:3005166a39b70c8b94455fdbe78d87a444da31ff70de3331cdec2c568cf25b7e \ + --hash=sha256:324721d93b998cb7367f1e6897370644751e5580ff9b370c0a50dc60a2003513 \ + --hash=sha256:33588310b5c886dfb87dba5f013b8d27df7ffd31dc753775342a1e5ab139e59d \ + --hash=sha256:35cf6ed4c38f054478a9df14f03c1169bb14bd98f0b1705751079b25e1cb58bc \ + --hash=sha256:3ca482ea80626048975360c8e62be3ceb0f11803180b73163acd24bf014133a0 \ + --hash=sha256:56ce0c106d5c3fec1038c3cca3d55ac320a5be1b44bf15116732d0bc716979a2 \ + --hash=sha256:5a217bca51f3b91971400890905a9323ad805838ca3fa1e202a01844f485ee87 \ + --hash=sha256:678cfa0d1e72ef41d48993a7be75a76b0725d29b820ff3cfd606a5b2b33fda01 \ + 
--hash=sha256:69fd009a325cad6fbfd5b04c711a4da563c6c4854fc4c9544bff3088387c77c0 \ + --hash=sha256:6cf9b76d6e93c62114bd19485e5cb003115c134cf9ce91f8ac924c44f8c8c3f4 \ + --hash=sha256:74f18a4c8ca04134d2052a140322002fef535c99cdbc2a6afc18a8024d5c9d5b \ + --hash=sha256:85f759ed59ffd1d0baad296e72780aa62ff8a71f94dc1ab340386a1207d0ea81 \ + --hash=sha256:87086eae86a700307b544625e3ba11cc600c3c0ef8ab97b0fda0705d6db3d4e3 \ + --hash=sha256:8814722cffcfd1fbd91edd9f3451b88a8f26a5fd41b28c1c9193949d1c689dc4 \ + --hash=sha256:8fedec73d590fd30c4e3f0d0f4bc961aeca8390c72f3eaa1a0874d180e868ddf \ + --hash=sha256:9515ea7f596c8092fdc9902627e51b23a75daa2c7815ed5aa8cf4f07469212ec \ + --hash=sha256:988b738f56c665366b1e4bfd9045c3efae89ee366ca3839cd5af53eaa1401bce \ + --hash=sha256:a2a8d873667e4fd2f34aedab02ba500b824692c6542e017075a2efc38f60a4c0 \ + --hash=sha256:bd7cf7a8d9f34cc67220f1195884151426ce616fdc8285df9054bfa10135925f \ + --hash=sha256:bdce70e562c69bb089523e75ef1d9625b7417c6297a76ac27b1b8b1eb51b7d0f \ + --hash=sha256:be14b31eb3a293fc6e6aa2807c8a3224c71426f7c4e3639ccf1a2f3ffd6df8c3 \ + --hash=sha256:be41b0c7366e5549265adf2145135dca107718fa44b6e418dc7499cfff6b4689 \ + --hash=sha256:c310767268d88803b653fffe6d6f2f17bb9d49ffceb8d70aed50ad45ea49ab08 \ + --hash=sha256:c58115384bdcfe9c7f644c72f10f6f42bed7cf59f7b52fe1bf7ae0a622b3a139 \ + --hash=sha256:c640b0ef54138fde761ec99a6c7dc4ce05e80420262c20fa239e694ca371d434 \ + --hash=sha256:ca20550bb590db16223eb9ccc5852335b48b8f597e2f6f0878bbfd9e7314eb17 \ + --hash=sha256:d97aae66b7de41cdf5b12087b5509e4e9805ed6f562406dfcf60e8481a9a28f8 \ + --hash=sha256:e9326ca78111e4c645f7e49cbce4ed2f3f85e17b61a563328c85a5208cf34440 # via # gcp-releasetool # secretstorage From 31ffb848ea223f3954d029d1e0cd9f0b371491d7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 6 Mar 2024 10:39:30 -0500 Subject: [PATCH 09/24] build(deps): bump cryptography from 42.0.2 to 42.0.4 in .kokoro (#527) Source-Link: https://github.com/googleapis/synthtool/commit/d895aec3679ad22aa120481f746bf9f2f325f26f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:98f3afd11308259de6e828e37376d18867fd321aba07826e29e4f8d9cab56bad Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 +-- .kokoro/requirements.txt | 66 +++++++++++++++++++-------------------- 2 files changed, 35 insertions(+), 35 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 2aefd0e9..e4e943e0 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:97b671488ad548ef783a452a9e1276ac10f144d5ae56d98cc4bf77ba504082b4 -# created: 2024-02-06T03:20:16.660474034Z + digest: sha256:98f3afd11308259de6e828e37376d18867fd321aba07826e29e4f8d9cab56bad +# created: 2024-02-27T15:56:18.442440378Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 8c11c9f3..bda8e38c 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -93,39 +93,39 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==42.0.0 \ - --hash=sha256:0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b \ - --hash=sha256:146e971e92a6dd042214b537a726c9750496128453146ab0ee8971a0299dc9bd \ - --hash=sha256:14e4b909373bc5bf1095311fa0f7fcabf2d1a160ca13f1e9e467be1ac4cbdf94 \ - --hash=sha256:206aaf42e031b93f86ad60f9f5d9da1b09164f25488238ac1dc488334eb5e221 \ - --hash=sha256:3005166a39b70c8b94455fdbe78d87a444da31ff70de3331cdec2c568cf25b7e \ - --hash=sha256:324721d93b998cb7367f1e6897370644751e5580ff9b370c0a50dc60a2003513 \ - --hash=sha256:33588310b5c886dfb87dba5f013b8d27df7ffd31dc753775342a1e5ab139e59d \ - --hash=sha256:35cf6ed4c38f054478a9df14f03c1169bb14bd98f0b1705751079b25e1cb58bc \ - --hash=sha256:3ca482ea80626048975360c8e62be3ceb0f11803180b73163acd24bf014133a0 \ - --hash=sha256:56ce0c106d5c3fec1038c3cca3d55ac320a5be1b44bf15116732d0bc716979a2 \ - --hash=sha256:5a217bca51f3b91971400890905a9323ad805838ca3fa1e202a01844f485ee87 \ - --hash=sha256:678cfa0d1e72ef41d48993a7be75a76b0725d29b820ff3cfd606a5b2b33fda01 \ - --hash=sha256:69fd009a325cad6fbfd5b04c711a4da563c6c4854fc4c9544bff3088387c77c0 \ - --hash=sha256:6cf9b76d6e93c62114bd19485e5cb003115c134cf9ce91f8ac924c44f8c8c3f4 \ - --hash=sha256:74f18a4c8ca04134d2052a140322002fef535c99cdbc2a6afc18a8024d5c9d5b \ - --hash=sha256:85f759ed59ffd1d0baad296e72780aa62ff8a71f94dc1ab340386a1207d0ea81 \ - --hash=sha256:87086eae86a700307b544625e3ba11cc600c3c0ef8ab97b0fda0705d6db3d4e3 \ - --hash=sha256:8814722cffcfd1fbd91edd9f3451b88a8f26a5fd41b28c1c9193949d1c689dc4 \ - --hash=sha256:8fedec73d590fd30c4e3f0d0f4bc961aeca8390c72f3eaa1a0874d180e868ddf \ - --hash=sha256:9515ea7f596c8092fdc9902627e51b23a75daa2c7815ed5aa8cf4f07469212ec \ - --hash=sha256:988b738f56c665366b1e4bfd9045c3efae89ee366ca3839cd5af53eaa1401bce \ - --hash=sha256:a2a8d873667e4fd2f34aedab02ba500b824692c6542e017075a2efc38f60a4c0 \ - --hash=sha256:bd7cf7a8d9f34cc67220f1195884151426ce616fdc8285df9054bfa10135925f \ - --hash=sha256:bdce70e562c69bb089523e75ef1d9625b7417c6297a76ac27b1b8b1eb51b7d0f \ - --hash=sha256:be14b31eb3a293fc6e6aa2807c8a3224c71426f7c4e3639ccf1a2f3ffd6df8c3 \ - --hash=sha256:be41b0c7366e5549265adf2145135dca107718fa44b6e418dc7499cfff6b4689 \ - --hash=sha256:c310767268d88803b653fffe6d6f2f17bb9d49ffceb8d70aed50ad45ea49ab08 \ - --hash=sha256:c58115384bdcfe9c7f644c72f10f6f42bed7cf59f7b52fe1bf7ae0a622b3a139 \ - --hash=sha256:c640b0ef54138fde761ec99a6c7dc4ce05e80420262c20fa239e694ca371d434 \ - --hash=sha256:ca20550bb590db16223eb9ccc5852335b48b8f597e2f6f0878bbfd9e7314eb17 \ - --hash=sha256:d97aae66b7de41cdf5b12087b5509e4e9805ed6f562406dfcf60e8481a9a28f8 \ - --hash=sha256:e9326ca78111e4c645f7e49cbce4ed2f3f85e17b61a563328c85a5208cf34440 +cryptography==42.0.4 \ + --hash=sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b \ + --hash=sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce \ + --hash=sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88 \ + 
--hash=sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7 \ + --hash=sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20 \ + --hash=sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9 \ + --hash=sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff \ + --hash=sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1 \ + --hash=sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764 \ + --hash=sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b \ + --hash=sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298 \ + --hash=sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1 \ + --hash=sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824 \ + --hash=sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257 \ + --hash=sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a \ + --hash=sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129 \ + --hash=sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb \ + --hash=sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929 \ + --hash=sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854 \ + --hash=sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52 \ + --hash=sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923 \ + --hash=sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885 \ + --hash=sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0 \ + --hash=sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd \ + --hash=sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2 \ + --hash=sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18 \ + --hash=sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b \ + --hash=sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992 \ + --hash=sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74 \ + --hash=sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660 \ + --hash=sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925 \ + --hash=sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449 # via # gcp-releasetool # secretstorage From 5fe726819c0c8941da830d0c567740871e0e7e49 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 6 Mar 2024 10:41:00 -0500 Subject: [PATCH 10/24] chore: fix typos (#525) * chore: fix minor typos * protobf -> protobuf --- google/cloud/datastore/_http.py | 2 +- tests/system/test_transaction.py | 2 +- tests/unit/test_helpers.py | 18 +++++++++--------- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/google/cloud/datastore/_http.py b/google/cloud/datastore/_http.py index a4441c09..1eccfd35 100644 --- a/google/cloud/datastore/_http.py +++ b/google/cloud/datastore/_http.py @@ -32,7 +32,7 @@ def _make_retry_timeout_kwargs(retry, timeout): - """Helper for methods taking optional retry / timout args.""" + """Helper for methods taking optional retry / timeout args.""" kwargs = {} if retry is not None: diff --git a/tests/system/test_transaction.py b/tests/system/test_transaction.py index a93538fb..6dc9dacd 100644 --- a/tests/system/test_transaction.py +++ b/tests/system/test_transaction.py @@ -49,7 +49,7 @@ def test_transaction_via_explicit_begin_get_commit( # 
github.com/GoogleCloudPlatform/google-cloud-python/issues/1859 # Note that this example lacks the threading which provokes the race # condition in that issue: we are basically just exercising the - # "explict" path for using transactions. + # "explicit" path for using transactions. before_1 = 100 before_2 = 0 transfer_amount = 40 diff --git a/tests/unit/test_helpers.py b/tests/unit/test_helpers.py index 467a2df1..89bf6165 100644 --- a/tests/unit/test_helpers.py +++ b/tests/unit/test_helpers.py @@ -208,7 +208,7 @@ def _compare_entity_proto(entity_pb1, entity_pb2): assert val1 == val2 -def test_enity_to_protobf_w_empty(): +def test_entity_to_protobuf_w_empty(): from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import entity_to_protobuf @@ -218,7 +218,7 @@ def test_enity_to_protobf_w_empty(): _compare_entity_proto(entity_pb, entity_pb2.Entity()) -def test_enity_to_protobf_w_key_only(): +def test_entity_to_protobuf_w_key_only(): from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import entity_to_protobuf @@ -239,7 +239,7 @@ def test_enity_to_protobf_w_key_only(): _compare_entity_proto(entity_pb, expected_pb) -def test_enity_to_protobf_w_simple_fields(): +def test_entity_to_protobuf_w_simple_fields(): from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import _new_value_pb @@ -261,7 +261,7 @@ def test_enity_to_protobf_w_simple_fields(): _compare_entity_proto(entity_pb, expected_pb) -def test_enity_to_protobf_w_with_empty_list(): +def test_entity_to_protobuf_w_with_empty_list(): from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import entity_to_protobuf @@ -277,7 +277,7 @@ def test_enity_to_protobf_w_with_empty_list(): _compare_entity_proto(entity_pb, expected_pb) -def test_enity_to_protobf_w_inverts_to_protobuf(): +def test_entity_to_protobuf_w_inverts_to_protobuf(): from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore.helpers import _new_value_pb from google.cloud.datastore.helpers import entity_from_protobuf @@ -332,7 +332,7 @@ def test_enity_to_protobf_w_inverts_to_protobuf(): _compare_entity_proto(original_pb, new_pb) -def test_enity_to_protobf_w_meaning_with_change(): +def test_entity_to_protobuf_w_meaning_with_change(): from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import _new_value_pb @@ -352,7 +352,7 @@ def test_enity_to_protobf_w_meaning_with_change(): _compare_entity_proto(entity_pb, expected_pb) -def test_enity_to_protobf_w_variable_meanings(): +def test_entity_to_protobuf_w_variable_meanings(): from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import _new_value_pb @@ -380,7 +380,7 @@ def test_enity_to_protobf_w_variable_meanings(): _compare_entity_proto(entity_pb, expected_pb) -def test_enity_to_protobf_w_dict_to_entity(): +def test_entity_to_protobuf_w_dict_to_entity(): from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import entity_to_protobuf @@ -401,7 
+401,7 @@ def test_enity_to_protobf_w_dict_to_entity(): assert entity_pb == expected_pb -def test_enity_to_protobf_w_dict_to_entity_recursive(): +def test_entity_to_protobuf_w_dict_to_entity_recursive(): from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import entity_to_protobuf From dfbee2db800a3ca99e65a5d386ea907db1c46598 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 15 Mar 2024 21:06:10 +0000 Subject: [PATCH 11/24] feat: add new types ExplainOptions, ExplainMetrics, PlanSummary, ExecutionStats (#521) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Allow users to explicitly configure universe domain chore: Update gapic-generator-python to v1.14.0 PiperOrigin-RevId: 603108274 Source-Link: https://github.com/googleapis/googleapis/commit/3d83e3652f689ab51c3f95f876458c6faef619bf Source-Link: https://github.com/googleapis/googleapis-gen/commit/baf5e9bbb14a768b2b4c9eae9feb78f18f1757fa Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYmFmNWU5YmJiMTRhNzY4YjJiNGM5ZWFlOWZlYjc4ZjE4ZjE3NTdmYSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix: Resolve AttributeError 'Credentials' object has no attribute 'universe_domain' fix: Add google-auth as a direct dependency fix: Add staticmethod decorator to methods added in v1.14.0 chore: Update gapic-generator-python to v1.14.1 PiperOrigin-RevId: 603728206 Source-Link: https://github.com/googleapis/googleapis/commit/9063da8b4d45339db4e2d7d92a27c6708620e694 Source-Link: https://github.com/googleapis/googleapis-gen/commit/891c67d0a855b08085eb301dabb14064ef4b2c6d Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiODkxYzY3ZDBhODU1YjA4MDg1ZWIzMDFkYWJiMTQwNjRlZjRiMmM2ZCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix(diregapic): s/bazel/bazelisk/ in DIREGAPIC build GitHub action PiperOrigin-RevId: 604714585 Source-Link: https://github.com/googleapis/googleapis/commit/e4dce1324f4cb6dedb6822cb157e13cb8e0b3073 Source-Link: https://github.com/googleapis/googleapis-gen/commit/4036f78305c5c2aab80ff91960b3a3d983ff4b03 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDAzNmY3ODMwNWM1YzJhYWI4MGZmOTE5NjBiM2EzZDk4M2ZmNGIwMyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix(deps): Require `google-api-core>=1.34.1` fix: Resolve issue with missing import for certain enums in `**/types/…` PiperOrigin-RevId: 607041732 Source-Link: https://github.com/googleapis/googleapis/commit/b4532678459355676c95c00e39866776b7f40b2e Source-Link: https://github.com/googleapis/googleapis-gen/commit/cd796416f0f54cb22b2c44fb2d486960e693a346 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2Q3OTY0MTZmMGY1NGNiMjJiMmM0NGZiMmQ0ODY5NjBlNjkzYTM0NiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix(deps): Exclude google-auth 2.24.0 and 2.25.0 chore: Update gapic-generator-python to v1.14.4 PiperOrigin-RevId: 611561820 Source-Link: https://github.com/googleapis/googleapis/commit/87ef1fe57feede1f23b523f3c7fc4c3f2b92d6d2 Source-Link: https://github.com/googleapis/googleapis-gen/commit/197316137594aafad94dea31226528fbcc39310c 
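One of the changes rolled up in this commit message is explicit universe-domain configuration. A hypothetical sketch of what that looks like from the caller's side, assuming google-api-core's ClientOptions (which accepts a universe_domain argument in recent releases; "googleapis.com" is the default public universe):

from google.api_core.client_options import ClientOptions
from google.cloud.datastore_v1 import DatastoreClient

# TPC users would pass their own universe domain here; the client then
# validates it against the credentials' universe before issuing RPCs.
options = ClientOptions(universe_domain="googleapis.com")
client = DatastoreClient(client_options=options)
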
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTk3MzE2MTM3NTk0YWFmYWQ5NGRlYTMxMjI2NTI4ZmJjYzM5MzEwYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: Add include_recaptcha_script for as a new action in firewall policies PiperOrigin-RevId: 612851792 Source-Link: https://github.com/googleapis/googleapis/commit/49ea2c0fc42dd48996b833f05a258ad7e8590d3d Source-Link: https://github.com/googleapis/googleapis-gen/commit/460fdcbbbe00f35b1c591b1f3ef0c77ebd3ce277 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDYwZmRjYmJiZTAwZjM1YjFjNTkxYjFmM2VmMGM3N2ViZDNjZTI3NyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix coverage * fix(deps): require google-auth >= 2.14.1 * feat: add new types ExplainOptions, ExplainMetrics, PlanSummary, ExecutionStats feat: add ExplainOptions field to RunQueryRequest feat: add ExplainMetrics field to RunQueryResponse feat: add ExplainOptions field to RunAggregationQueryRequest feat: add ExplainMetrics field to RunAggregationQueryResponse PiperOrigin-RevId: 615158168 Source-Link: https://github.com/googleapis/googleapis/commit/4d535ac0538bb2d4b406250d7ec10b25a17a54cf Source-Link: https://github.com/googleapis/googleapis-gen/commit/02e272ded538b0f97832bfad47decbc3dc65a89a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDJlMjcyZGVkNTM4YjBmOTc4MzJiZmFkNDdkZWNiYzNkYzY1YTg5YSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- google/cloud/datastore_admin/__init__.py | 2 +- google/cloud/datastore_admin_v1/__init__.py | 2 +- .../datastore_admin_v1/services/__init__.py | 2 +- .../services/datastore_admin/__init__.py | 2 +- .../services/datastore_admin/async_client.py | 98 +++- .../services/datastore_admin/client.py | 322 ++++++++++-- .../services/datastore_admin/pagers.py | 2 +- .../datastore_admin/transports/__init__.py | 2 +- .../datastore_admin/transports/base.py | 8 +- .../datastore_admin/transports/grpc.py | 4 +- .../transports/grpc_asyncio.py | 4 +- .../datastore_admin/transports/rest.py | 26 +- .../datastore_admin_v1/types/__init__.py | 2 +- .../types/datastore_admin.py | 2 +- .../cloud/datastore_admin_v1/types/index.py | 2 +- .../datastore_admin_v1/types/migration.py | 2 +- google/cloud/datastore_v1/__init__.py | 10 +- .../cloud/datastore_v1/services/__init__.py | 2 +- .../services/datastore/__init__.py | 2 +- .../services/datastore/async_client.py | 105 +++- .../datastore_v1/services/datastore/client.py | 326 ++++++++++-- .../services/datastore/transports/__init__.py | 2 +- .../services/datastore/transports/base.py | 8 +- .../services/datastore/transports/grpc.py | 4 +- .../datastore/transports/grpc_asyncio.py | 4 +- .../services/datastore/transports/rest.py | 48 +- google/cloud/datastore_v1/types/__init__.py | 12 +- .../datastore_v1/types/aggregation_result.py | 2 +- google/cloud/datastore_v1/types/datastore.py | 43 +- google/cloud/datastore_v1/types/entity.py | 2 +- google/cloud/datastore_v1/types/query.py | 2 +- .../cloud/datastore_v1/types/query_profile.py | 144 ++++++ scripts/fixup_datastore_admin_v1_keywords.py | 2 +- scripts/fixup_datastore_v1_keywords.py | 6 +- setup.py | 3 + testing/constraints-3.7.txt | 1 + testing/constraints-3.8.txt | 1 + tests/__init__.py | 2 +- tests/unit/__init__.py | 2 +- 
tests/unit/gapic/__init__.py | 2 +- .../unit/gapic/datastore_admin_v1/__init__.py | 2 +- .../test_datastore_admin.py | 481 +++++++++++++++-- tests/unit/gapic/datastore_v1/__init__.py | 2 +- .../unit/gapic/datastore_v1/test_datastore.py | 488 +++++++++++++++--- 44 files changed, 1899 insertions(+), 291 deletions(-) create mode 100644 google/cloud/datastore_v1/types/query_profile.py diff --git a/google/cloud/datastore_admin/__init__.py b/google/cloud/datastore_admin/__init__.py index 17ff436e..09b75aef 100644 --- a/google/cloud/datastore_admin/__init__.py +++ b/google/cloud/datastore_admin/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_admin_v1/__init__.py b/google/cloud/datastore_admin_v1/__init__.py index b909402e..6d57bbb9 100644 --- a/google/cloud/datastore_admin_v1/__init__.py +++ b/google/cloud/datastore_admin_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_admin_v1/services/__init__.py b/google/cloud/datastore_admin_v1/services/__init__.py index 89a37dc9..8f6cf068 100644 --- a/google/cloud/datastore_admin_v1/services/__init__.py +++ b/google/cloud/datastore_admin_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py b/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py index bbf2af15..be83caf7 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index 40cde757..b564e8c1 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
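The hunks that follow, here and in client.py below, thread the new universe-domain plumbing through DatastoreAdminClient and its async counterpart. A minimal sketch of what the feature enables, assuming application default credentials are available and a google-api-core recent enough for the universe_domain client option; the universe shown is purely hypothetical:

    from google.api_core.client_options import ClientOptions
    from google.cloud.datastore_admin_v1 import DatastoreAdminClient

    # "example.goog" is a hypothetical universe; "googleapis.com" stays the default.
    options = ClientOptions(universe_domain="example.goog")
    client = DatastoreAdminClient(client_options=options)

    # The endpoint is rendered from _DEFAULT_ENDPOINT_TEMPLATE, "datastore.{UNIVERSE_DOMAIN}".
    print(client.api_endpoint)     # datastore.example.goog
    print(client.universe_domain)  # example.goog

Note that the credential/universe consistency check runs lazily, via _validate_universe_domain(), before each RPC rather than at construction time, which is why the hunks below insert the call in every method.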
@@ -38,9 +38,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore @@ -113,8 +113,12 @@ class DatastoreAdminAsyncClient: _client: DatastoreAdminClient + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = DatastoreAdminClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = DatastoreAdminClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = DatastoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = DatastoreAdminClient._DEFAULT_UNIVERSE common_billing_account_path = staticmethod( DatastoreAdminClient.common_billing_account_path @@ -219,6 +223,25 @@ def transport(self) -> DatastoreAdminTransport: """ return self._client.transport + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + get_transport_class = functools.partial( type(DatastoreAdminClient).get_transport_class, type(DatastoreAdminClient) ) @@ -231,7 +254,7 @@ def __init__( client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the datastore admin client. + """Instantiates the datastore admin async client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -242,23 +265,38 @@ def __init__( transport (Union[str, ~.DatastoreAdminTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. 
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. @@ -432,6 +470,9 @@ async def sample_export_entities(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -611,6 +652,9 @@ async def sample_import_entities(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -721,6 +765,9 @@ async def sample_create_index(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -833,6 +880,9 @@ async def sample_delete_index(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -933,6 +983,9 @@ async def sample_get_index(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1031,6 +1084,9 @@ async def sample_list_indexes(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1094,6 +1150,9 @@ async def list_operations( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1148,6 +1207,9 @@ async def get_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1206,6 +1268,9 @@ async def delete_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -1260,6 +1325,9 @@ async def cancel_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
await rpc( request, diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index cadac67e..de174f58 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -28,6 +28,7 @@ Union, cast, ) +import warnings from google.cloud.datastore_admin_v1 import gapic_version as package_version @@ -42,9 +43,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore @@ -183,11 +184,15 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "datastore.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) + _DEFAULT_ENDPOINT_TEMPLATE = "datastore.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -316,7 +321,7 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def get_mtls_endpoint_and_cert_source( cls, client_options: Optional[client_options_lib.ClientOptions] = None ): - """Return the API endpoint and client cert source for mutual TLS. + """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -346,6 +351,11 @@ def get_mtls_endpoint_and_cert_source( Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") @@ -379,6 +389,178 @@ def get_mtls_endpoint_and_cert_source( return api_endpoint, client_cert_source + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = DatastoreAdminClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = DatastoreAdminClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = DatastoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = DatastoreAdminClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = DatastoreAdminClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or DatastoreAdminClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + def __init__( self, *, @@ -398,22 +580,32 @@ def __init__( transport (Union[str, DatastoreAdminTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. @@ -424,17 +616,34 @@ def __init__( google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( - client_options + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = DatastoreAdminClient._read_environment_variables() + self._client_cert_source = DatastoreAdminClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert ) + self._universe_domain = DatastoreAdminClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False - api_key_value = getattr(client_options, "api_key", None) + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( "client_options.api_key and credentials are mutually exclusive" @@ -443,20 +652,33 @@ def __init__( # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. 
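Taken together, the new helpers resolve the endpoint in a fixed order: an explicit api_endpoint override always wins, then the mTLS endpoint (default universe only), then the universe-domain template. A quick illustration before the transport-selection hunk below, calling the private helper directly purely for demonstration (the override host is made up):

    from google.cloud.datastore_admin_v1 import DatastoreAdminClient

    # No override, no client cert, default universe, "auto" mTLS -> template endpoint.
    assert DatastoreAdminClient._get_api_endpoint(
        None, None, "googleapis.com", "auto"
    ) == "datastore.googleapis.com"

    # An explicit api_endpoint override takes precedence over everything else.
    assert DatastoreAdminClient._get_api_endpoint(
        "datastore.internal.example.com", None, "googleapis.com", "always"
    ) == "datastore.internal.example.com"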
- if isinstance(transport, DatastoreAdminTransport): + transport_provided = isinstance(transport, DatastoreAdminTransport) + if transport_provided: # transport is a DatastoreAdminTransport instance. - if credentials or client_options.credentials_file or api_key_value: + if credentials or self._client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." ) - if client_options.scopes: + if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." ) - self._transport = transport - else: + self._transport = cast(DatastoreAdminTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or DatastoreAdminClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: import google.auth._default # type: ignore if api_key_value and hasattr( @@ -466,17 +688,17 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(transport) + Transport = type(self).get_transport_class(cast(str, transport)) self._transport = Transport( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, - api_audience=client_options.api_audience, + api_audience=self._client_options.api_audience, ) def export_entities( @@ -640,6 +862,9 @@ def sample_export_entities(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -818,6 +1043,9 @@ def sample_import_entities(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -929,6 +1157,9 @@ def sample_create_index(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1042,6 +1273,9 @@ def sample_delete_index(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1133,6 +1367,9 @@ def sample_get_index(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1222,6 +1459,9 @@ def sample_list_indexes(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1298,6 +1538,9 @@ def list_operations( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1352,6 +1595,9 @@ def get_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1410,6 +1656,9 @@ def delete_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. 
+ self._validate_universe_domain() + # Send the request. rpc( request, @@ -1464,6 +1713,9 @@ def cancel_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py b/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py index 139e1983..eb4bd0dc 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py index d1930caa..f3b7656e 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index d37482a1..bddab490 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -63,7 +63,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'datastore.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -126,6 +126,10 @@ def __init__( host += ":443" self._host = host + @property + def host(self): + return self._host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index f5fd0e90..68867594 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -120,7 +120,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'datastore.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py index 3c6aa351..367a5ab6 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -165,7 +165,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'datastore.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py index b29a8b75..8776e623 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -35,9 +35,9 @@ import warnings try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.cloud.datastore_admin_v1.types import datastore_admin @@ -443,7 +443,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'datastore.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none @@ -599,9 +599,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -610,7 +608,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -688,7 +685,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -772,9 +768,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -783,7 +777,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -859,7 +852,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -945,9 +937,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -956,7 +946,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1034,7 +1023,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) diff --git a/google/cloud/datastore_admin_v1/types/__init__.py b/google/cloud/datastore_admin_v1/types/__init__.py index 7ba786ca..ca082a05 100644 --- a/google/cloud/datastore_admin_v1/types/__init__.py +++ b/google/cloud/datastore_admin_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_admin_v1/types/datastore_admin.py b/google/cloud/datastore_admin_v1/types/datastore_admin.py index 44782727..eb838570 100644 --- a/google/cloud/datastore_admin_v1/types/datastore_admin.py +++ b/google/cloud/datastore_admin_v1/types/datastore_admin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
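The REST transport hunks above also drop including_default_value_fields from every json_format.MessageToJson call; newer protobuf releases no longer accept that keyword, and use_integers_for_enums alone keeps enum fields numeric on the wire. A small sketch of the serialization the transport now performs (the project ID is a placeholder):

    from google.protobuf import json_format
    from google.cloud.datastore_admin_v1.types import datastore_admin

    request = datastore_admin.ExportEntitiesRequest(project_id="my-project")
    # proto-plus messages expose the underlying protobuf message via ._pb.
    body = json_format.MessageToJson(request._pb, use_integers_for_enums=True)
    print(body)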
diff --git a/google/cloud/datastore_admin_v1/types/index.py b/google/cloud/datastore_admin_v1/types/index.py index dfb44417..77a7079d 100644 --- a/google/cloud/datastore_admin_v1/types/index.py +++ b/google/cloud/datastore_admin_v1/types/index.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_admin_v1/types/migration.py b/google/cloud/datastore_admin_v1/types/migration.py index 12e93359..ec69e941 100644 --- a/google/cloud/datastore_admin_v1/types/migration.py +++ b/google/cloud/datastore_admin_v1/types/migration.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_v1/__init__.py b/google/cloud/datastore_v1/__init__.py index b494905b..8c9d09fe 100644 --- a/google/cloud/datastore_v1/__init__.py +++ b/google/cloud/datastore_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -61,6 +61,10 @@ from .types.query import PropertyReference from .types.query import Query from .types.query import QueryResultBatch +from .types.query_profile import ExecutionStats +from .types.query_profile import ExplainMetrics +from .types.query_profile import ExplainOptions +from .types.query_profile import PlanSummary __all__ = ( "DatastoreAsyncClient", @@ -78,6 +82,9 @@ "DatastoreClient", "Entity", "EntityResult", + "ExecutionStats", + "ExplainMetrics", + "ExplainOptions", "Filter", "GqlQuery", "GqlQueryParameter", @@ -88,6 +95,7 @@ "Mutation", "MutationResult", "PartitionId", + "PlanSummary", "Projection", "PropertyFilter", "PropertyOrder", diff --git a/google/cloud/datastore_v1/services/__init__.py b/google/cloud/datastore_v1/services/__init__.py index 89a37dc9..8f6cf068 100644 --- a/google/cloud/datastore_v1/services/__init__.py +++ b/google/cloud/datastore_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_v1/services/datastore/__init__.py b/google/cloud/datastore_v1/services/datastore/__init__.py index a93070e2..e992abb3 100644 --- a/google/cloud/datastore_v1/services/datastore/__init__.py +++ b/google/cloud/datastore_v1/services/datastore/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
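The datastore_v1 package diff above exports the new query-profile types alongside the existing query types. As a sketch of what these hunks enable end to end, assuming valid credentials plus a hypothetical project and kind, a caller could ask the service to explain a query (and, with analyze=True, also execute it):

    from google.cloud import datastore_v1
    from google.cloud.datastore_v1 import types

    client = datastore_v1.DatastoreClient()
    request = types.RunQueryRequest(
        project_id="my-project",  # hypothetical
        query=types.Query(kind=[types.KindExpression(name="Task")]),
        # analyze=True also runs the query, so ExecutionStats get populated.
        explain_options=types.ExplainOptions(analyze=True),
    )
    response = client.run_query(request=request)
    print(response.explain_metrics)  # PlanSummary plus ExecutionStats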
diff --git a/google/cloud/datastore_v1/services/datastore/async_client.py b/google/cloud/datastore_v1/services/datastore/async_client.py index c49b4b07..e911a362 100644 --- a/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/google/cloud/datastore_v1/services/datastore/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -38,14 +38,15 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.cloud.datastore_v1.types import aggregation_result from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query +from google.cloud.datastore_v1.types import query_profile from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO @@ -65,8 +66,12 @@ class DatastoreAsyncClient: _client: DatastoreClient + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = DatastoreClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = DatastoreClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = DatastoreClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = DatastoreClient._DEFAULT_UNIVERSE common_billing_account_path = staticmethod( DatastoreClient.common_billing_account_path @@ -165,6 +170,25 @@ def transport(self) -> DatastoreTransport: """ return self._client.transport + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + get_transport_class = functools.partial( type(DatastoreClient).get_transport_class, type(DatastoreClient) ) @@ -177,7 +201,7 @@ def __init__( client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the datastore client. + """Instantiates the datastore async client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -188,23 +212,38 @@ def __init__( transport (Union[str, ~.DatastoreTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. @@ -337,6 +376,9 @@ async def sample_lookup(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -429,6 +471,9 @@ async def sample_run_query(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -521,6 +566,9 @@ async def sample_run_aggregation_query(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -625,6 +673,9 @@ async def sample_begin_transaction(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -775,6 +826,9 @@ async def sample_commit(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -892,6 +946,9 @@ async def sample_rollback(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1009,6 +1066,9 @@ async def sample_allocate_ids(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1135,6 +1195,9 @@ async def sample_reserve_ids(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, @@ -1189,6 +1252,9 @@ async def list_operations( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1243,6 +1309,9 @@ async def get_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1301,6 +1370,9 @@ async def delete_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -1355,6 +1427,9 @@ async def cancel_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, diff --git a/google/cloud/datastore_v1/services/datastore/client.py b/google/cloud/datastore_v1/services/datastore/client.py index 58c8ad22..0a498175 100644 --- a/google/cloud/datastore_v1/services/datastore/client.py +++ b/google/cloud/datastore_v1/services/datastore/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -28,6 +28,7 @@ Union, cast, ) +import warnings from google.cloud.datastore_v1 import gapic_version as package_version @@ -42,14 +43,15 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.cloud.datastore_v1.types import aggregation_result from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query +from google.cloud.datastore_v1.types import query_profile from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO @@ -133,11 +135,15 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "datastore.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) + _DEFAULT_ENDPOINT_TEMPLATE = "datastore.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -266,7 +272,7 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def get_mtls_endpoint_and_cert_source( cls, client_options: Optional[client_options_lib.ClientOptions] = None ): - """Return the API endpoint and client cert source for mutual TLS. + """Deprecated. Return the API endpoint and client cert source for mutual TLS. 
The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -296,6 +302,11 @@ def get_mtls_endpoint_and_cert_source( Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") @@ -329,6 +340,178 @@ def get_mtls_endpoint_and_cert_source( return api_endpoint, client_cert_source + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. 
+ """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = DatastoreClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = DatastoreClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = DatastoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = DatastoreClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = DatastoreClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or DatastoreClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + def __init__( self, *, @@ -348,22 +531,32 @@ def __init__( transport (Union[str, DatastoreTransport]): The transport to use. 
If set to None, a transport is chosen automatically.
-        client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
-            client. It won't take effect if a ``transport`` instance is provided.
-            (1) The ``api_endpoint`` property can be used to override the
-            default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
-            environment variable can also be used to override the endpoint:
+        client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+            Custom options for the client.
+
+            1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
                 "always" (always use the default mTLS endpoint), "never" (always
-            use the default regular endpoint) and "auto" (auto switch to the
-            default mTLS endpoint if client certificate is present, this is
-            the default value). However, the ``api_endpoint`` property takes
-            precedence if provided.
-            (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+            2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                 is "true", then the ``client_cert_source`` property can be used
-            to provide client certificate for mutual TLS transport. If
+                to provide a client certificate for mTLS transport. If
                 not provided, the default SSL client certificate will be used if
                 present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                 set, no client certificate will be used.
+
+            3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that the ``api_endpoint``
+                property still takes precedence, and ``universe_domain`` is
+                currently not supported for mTLS.
+
         client_info (google.api_core.gapic_v1.client_info.ClientInfo):
             The client info used to send a user-agent string along with
             API requests. If ``None``, then default info will be used.
@@ -374,17 +567,34 @@ def __init__(
             google.auth.exceptions.MutualTLSChannelError: If mutual TLS
                 transport creation failed for any reason.
""" - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( - client_options + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = DatastoreClient._read_environment_variables() + self._client_cert_source = DatastoreClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = DatastoreClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env ) + self._api_endpoint = None # updated below, depending on `transport` - api_key_value = getattr(client_options, "api_key", None) + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( "client_options.api_key and credentials are mutually exclusive" @@ -393,20 +603,30 @@ def __init__( # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. - if isinstance(transport, DatastoreTransport): + transport_provided = isinstance(transport, DatastoreTransport) + if transport_provided: # transport is a DatastoreTransport instance. - if credentials or client_options.credentials_file or api_key_value: + if credentials or self._client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." ) - if client_options.scopes: + if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." 
) - self._transport = transport - else: + self._transport = cast(DatastoreTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = self._api_endpoint or DatastoreClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + + if not transport_provided: import google.auth._default # type: ignore if api_key_value and hasattr( @@ -416,17 +636,17 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(transport) + Transport = type(self).get_transport_class(cast(str, transport)) self._transport = Transport( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, - api_audience=client_options.api_audience, + api_audience=self._client_options.api_audience, ) def lookup( @@ -545,6 +765,9 @@ def sample_lookup(): gapic_v1.routing_header.to_grpc_metadata(header_params), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -633,6 +856,9 @@ def sample_run_query(): gapic_v1.routing_header.to_grpc_metadata(header_params), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -721,6 +947,9 @@ def sample_run_aggregation_query(): gapic_v1.routing_header.to_grpc_metadata(header_params), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -830,6 +1059,9 @@ def sample_begin_transaction(): gapic_v1.routing_header.to_grpc_metadata(header_params), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -985,6 +1217,9 @@ def sample_commit(): gapic_v1.routing_header.to_grpc_metadata(header_params), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1107,6 +1342,9 @@ def sample_rollback(): gapic_v1.routing_header.to_grpc_metadata(header_params), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1229,6 +1467,9 @@ def sample_allocate_ids(): gapic_v1.routing_header.to_grpc_metadata(header_params), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1350,6 +1591,9 @@ def sample_reserve_ids(): gapic_v1.routing_header.to_grpc_metadata(header_params), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1417,6 +1661,9 @@ def list_operations( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1471,6 +1718,9 @@ def get_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, @@ -1529,6 +1779,9 @@ def delete_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -1583,6 +1836,9 @@ def cancel_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, diff --git a/google/cloud/datastore_v1/services/datastore/transports/__init__.py b/google/cloud/datastore_v1/services/datastore/transports/__init__.py index 911cdd46..727e271c 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/__init__.py +++ b/google/cloud/datastore_v1/services/datastore/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_v1/services/datastore/transports/base.py b/google/cloud/datastore_v1/services/datastore/transports/base.py index e97e6b6b..3c31a4a7 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/base.py +++ b/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -61,7 +61,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'datastore.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -124,6 +124,10 @@ def __init__( host += ":443" self._host = host + @property + def host(self): + return self._host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/google/cloud/datastore_v1/services/datastore/transports/grpc.py index 3e31e98e..ebc16b21 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ b/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -71,7 +71,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'datastore.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none diff --git a/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py b/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py index 2a3a66b0..7b3997dd 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py +++ b/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -116,7 +116,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'datastore.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/google/cloud/datastore_v1/services/datastore/transports/rest.py b/google/cloud/datastore_v1/services/datastore/transports/rest.py index aa5b0d1e..24551618 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/rest.py +++ b/google/cloud/datastore_v1/services/datastore/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -34,9 +34,9 @@ import warnings try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.cloud.datastore_v1.types import datastore @@ -443,7 +443,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'datastore.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none @@ -555,9 +555,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -566,7 +564,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -655,9 +652,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -666,7 +661,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -753,9 +747,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -764,7 +756,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -851,9 +842,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -862,7 +851,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -949,9 +937,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -960,7 +946,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1048,9 +1033,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1059,7 +1042,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1148,9 +1130,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1159,7 
+1139,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1246,9 +1225,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1257,7 +1234,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) diff --git a/google/cloud/datastore_v1/types/__init__.py b/google/cloud/datastore_v1/types/__init__.py index b6ff2a44..6aa3d846 100644 --- a/google/cloud/datastore_v1/types/__init__.py +++ b/google/cloud/datastore_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -61,6 +61,12 @@ Query, QueryResultBatch, ) +from .query_profile import ( + ExecutionStats, + ExplainMetrics, + ExplainOptions, + PlanSummary, +) __all__ = ( "AggregationResult", @@ -103,4 +109,8 @@ "PropertyReference", "Query", "QueryResultBatch", + "ExecutionStats", + "ExplainMetrics", + "ExplainOptions", + "PlanSummary", ) diff --git a/google/cloud/datastore_v1/types/aggregation_result.py b/google/cloud/datastore_v1/types/aggregation_result.py index dd53cfa3..b35ca1f9 100644 --- a/google/cloud/datastore_v1/types/aggregation_result.py +++ b/google/cloud/datastore_v1/types/aggregation_result.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_v1/types/datastore.py b/google/cloud/datastore_v1/types/datastore.py index 6c768904..ccea0458 100644 --- a/google/cloud/datastore_v1/types/datastore.py +++ b/google/cloud/datastore_v1/types/datastore.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -22,6 +22,7 @@ from google.cloud.datastore_v1.types import aggregation_result from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query as gd_query +from google.cloud.datastore_v1.types import query_profile from google.protobuf import timestamp_pb2 # type: ignore @@ -185,6 +186,11 @@ class RunQueryRequest(proto.Message): non-aggregation query. This field is a member of `oneof`_ ``query_type``. + explain_options (google.cloud.datastore_v1.types.ExplainOptions): + Optional. Explain options for the query. If + set, additional query statistics will be + returned. If not, only query results will be + returned. 
""" project_id: str = proto.Field( @@ -217,6 +223,11 @@ class RunQueryRequest(proto.Message): oneof="query_type", message=gd_query.GqlQuery, ) + explain_options: query_profile.ExplainOptions = proto.Field( + proto.MESSAGE, + number=12, + message=query_profile.ExplainOptions, + ) class RunQueryResponse(proto.Message): @@ -237,6 +248,11 @@ class RunQueryResponse(proto.Message): [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] was set in [RunQueryRequest.read_options][google.datastore.v1.RunQueryRequest.read_options]. + explain_metrics (google.cloud.datastore_v1.types.ExplainMetrics): + Query explain metrics. This is only present when the + [RunQueryRequest.explain_options][google.datastore.v1.RunQueryRequest.explain_options] + is provided, and it is sent only once with the last response + in the stream. """ batch: gd_query.QueryResultBatch = proto.Field( @@ -253,6 +269,11 @@ class RunQueryResponse(proto.Message): proto.BYTES, number=5, ) + explain_metrics: query_profile.ExplainMetrics = proto.Field( + proto.MESSAGE, + number=9, + message=query_profile.ExplainMetrics, + ) class RunAggregationQueryRequest(proto.Message): @@ -292,6 +313,11 @@ class RunAggregationQueryRequest(proto.Message): aggregation query. This field is a member of `oneof`_ ``query_type``. + explain_options (google.cloud.datastore_v1.types.ExplainOptions): + Optional. Explain options for the query. If + set, additional query statistics will be + returned. If not, only query results will be + returned. """ project_id: str = proto.Field( @@ -324,6 +350,11 @@ class RunAggregationQueryRequest(proto.Message): oneof="query_type", message=gd_query.GqlQuery, ) + explain_options: query_profile.ExplainOptions = proto.Field( + proto.MESSAGE, + number=11, + message=query_profile.ExplainOptions, + ) class RunAggregationQueryResponse(proto.Message): @@ -345,6 +376,11 @@ class RunAggregationQueryResponse(proto.Message): [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] was set in [RunAggregationQueryRequest.read_options][google.datastore.v1.RunAggregationQueryRequest.read_options]. + explain_metrics (google.cloud.datastore_v1.types.ExplainMetrics): + Query explain metrics. This is only present when the + [RunAggregationQueryRequest.explain_options][google.datastore.v1.RunAggregationQueryRequest.explain_options] + is provided, and it is sent only once with the last response + in the stream. """ batch: aggregation_result.AggregationResultBatch = proto.Field( @@ -361,6 +397,11 @@ class RunAggregationQueryResponse(proto.Message): proto.BYTES, number=5, ) + explain_metrics: query_profile.ExplainMetrics = proto.Field( + proto.MESSAGE, + number=9, + message=query_profile.ExplainMetrics, + ) class BeginTransactionRequest(proto.Message): diff --git a/google/cloud/datastore_v1/types/entity.py b/google/cloud/datastore_v1/types/entity.py index 09c0ecc8..5c5bcdc4 100644 --- a/google/cloud/datastore_v1/types/entity.py +++ b/google/cloud/datastore_v1/types/entity.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/datastore_v1/types/query.py b/google/cloud/datastore_v1/types/query.py index 0ddd6811..2ce1000f 100644 --- a/google/cloud/datastore_v1/types/query.py +++ b/google/cloud/datastore_v1/types/query.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_v1/types/query_profile.py b/google/cloud/datastore_v1/types/query_profile.py new file mode 100644 index 00000000..8dca0f6e --- /dev/null +++ b/google/cloud/datastore_v1/types/query_profile.py @@ -0,0 +1,144 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.datastore.v1", + manifest={ + "ExplainOptions", + "ExplainMetrics", + "PlanSummary", + "ExecutionStats", + }, +) + + +class ExplainOptions(proto.Message): + r"""Explain options for the query. + + Attributes: + analyze (bool): + Optional. Whether to execute this query. + + When false (the default), the query will be + planned, returning only metrics from the + planning stages. + + When true, the query will be planned and + executed, returning the full query results along + with both planning and execution stage metrics. + """ + + analyze: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class ExplainMetrics(proto.Message): + r"""Explain metrics for the query. + + Attributes: + plan_summary (google.cloud.datastore_v1.types.PlanSummary): + Planning phase information for the query. + execution_stats (google.cloud.datastore_v1.types.ExecutionStats): + Aggregated stats from the execution of the query. Only + present when + [ExplainOptions.analyze][google.datastore.v1.ExplainOptions.analyze] + is set to true. + """ + + plan_summary: "PlanSummary" = proto.Field( + proto.MESSAGE, + number=1, + message="PlanSummary", + ) + execution_stats: "ExecutionStats" = proto.Field( + proto.MESSAGE, + number=2, + message="ExecutionStats", + ) + + +class PlanSummary(proto.Message): + r"""Planning phase information for the query. + + Attributes: + indexes_used (MutableSequence[google.protobuf.struct_pb2.Struct]): + The indexes selected for the query. For example: [ + {"query_scope": "Collection", "properties": "(foo ASC, + **name** ASC)"}, {"query_scope": "Collection", "properties": + "(bar ASC, **name** ASC)"} ] + """ + + indexes_used: MutableSequence[struct_pb2.Struct] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=struct_pb2.Struct, + ) + + +class ExecutionStats(proto.Message): + r"""Execution statistics for the query. 
+
+    Attributes:
+        results_returned (int):
+            Total number of results returned, including
+            documents, projections, aggregation results,
+            and keys.
+        execution_duration (google.protobuf.duration_pb2.Duration):
+            Total time to execute the query in the
+            backend.
+        read_operations (int):
+            Total billable read operations.
+        debug_stats (google.protobuf.struct_pb2.Struct):
+            Debugging statistics from the execution of the query. Note
+            that the debugging stats are subject to change as Firestore
+            evolves. It could include: { "indexes_entries_scanned":
+            "1000", "documents_scanned": "20", "billing_details" : {
+            "documents_billable": "20", "index_entries_billable":
+            "1000", "min_query_cost": "0" } }
+    """
+
+    results_returned: int = proto.Field(
+        proto.INT64,
+        number=1,
+    )
+    execution_duration: duration_pb2.Duration = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=duration_pb2.Duration,
+    )
+    read_operations: int = proto.Field(
+        proto.INT64,
+        number=4,
+    )
+    debug_stats: struct_pb2.Struct = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=struct_pb2.Struct,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/scripts/fixup_datastore_admin_v1_keywords.py b/scripts/fixup_datastore_admin_v1_keywords.py
index 44ae7ecb..2f999e1e 100644
--- a/scripts/fixup_datastore_admin_v1_keywords.py
+++ b/scripts/fixup_datastore_admin_v1_keywords.py
@@ -1,6 +1,6 @@
 #! /usr/bin/env python3
 # -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/scripts/fixup_datastore_v1_keywords.py b/scripts/fixup_datastore_v1_keywords.py
index 4a8be1c9..f0406904 100644
--- a/scripts/fixup_datastore_v1_keywords.py
+++ b/scripts/fixup_datastore_v1_keywords.py
@@ -1,6 +1,6 @@
 #! /usr/bin/env python3
 # -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -45,8 +45,8 @@ class datastoreCallTransformer(cst.CSTTransformer): 'lookup': ('project_id', 'keys', 'database_id', 'read_options', ), 'reserve_ids': ('project_id', 'keys', 'database_id', ), 'rollback': ('project_id', 'transaction', 'database_id', ), - 'run_aggregation_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'aggregation_query', 'gql_query', ), - 'run_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'query', 'gql_query', ), + 'run_aggregation_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'aggregation_query', 'gql_query', 'explain_options', ), + 'run_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'query', 'gql_query', 'explain_options', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/setup.py b/setup.py index 07827e71..5f588e05 100644 --- a/setup.py +++ b/setup.py @@ -30,6 +30,9 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "google-cloud-core >= 1.4.0, <3.0.0dev", "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 4b49551c..da9c6ca8 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -6,6 +6,7 @@ # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 google-api-core==1.34.0 +google-auth==2.14.1 google-cloud-core==1.4.0 proto-plus==1.22.0 libcst==0.2.5 diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt index e69de29b..932ece69 100644 --- a/testing/constraints-3.8.txt +++ b/testing/constraints-3.8.txt @@ -0,0 +1 @@ +google-api-core==2.14.0 diff --git a/tests/__init__.py b/tests/__init__.py index 89a37dc9..8f6cf068 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py index 89a37dc9..8f6cf068 100644 --- a/tests/unit/__init__.py +++ b/tests/unit/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py index 89a37dc9..8f6cf068 100644 --- a/tests/unit/gapic/__init__.py +++ b/tests/unit/gapic/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
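The test updates below exercise the new endpoint-resolution helpers on the admin client. As a rough sketch of the precedence they encode (explicit ``api_endpoint`` override first, then the mTLS settings, then the universe-domain endpoint template), assuming anonymous placeholder credentials and the same placeholder universe "bar.com" the tests use:

    import os
    from unittest import mock

    from google.api_core.client_options import ClientOptions
    from google.auth import credentials as ga_credentials
    from google.cloud import datastore_admin_v1

    creds = ga_credentials.AnonymousCredentials()

    # 1. An explicit api_endpoint override always wins.
    client = datastore_admin_v1.DatastoreAdminClient(
        credentials=creds,
        client_options=ClientOptions(api_endpoint="foo.com"),
    )
    assert client.api_endpoint == "foo.com"

    # 2. Otherwise the universe domain (from the client option or the
    #    GOOGLE_CLOUD_UNIVERSE_DOMAIN variable) fills the endpoint
    #    template, e.g. "datastore.{UNIVERSE_DOMAIN}".
    with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "bar.com"}):
        client = datastore_admin_v1.DatastoreAdminClient(credentials=creds)
        assert client.universe_domain == "bar.com"
        assert client.api_endpoint == "datastore.bar.com"

    # The credentials' universe ("googleapis.com" for anonymous credentials)
    # is checked lazily: the mismatch with "bar.com" would surface as a
    # ValueError on the first RPC, not at construction time.

Note that mTLS ("always", or "auto" with a client certificate present) is only supported in the default googleapis.com universe, which is the case `_get_api_endpoint` raises for in the diffs above.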
diff --git a/tests/unit/gapic/datastore_admin_v1/__init__.py b/tests/unit/gapic/datastore_admin_v1/__init__.py index 89a37dc9..8f6cf068 100644 --- a/tests/unit/gapic/datastore_admin_v1/__init__.py +++ b/tests/unit/gapic/datastore_admin_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index eb8b8a4f..40adb3a4 100644 --- a/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -29,6 +29,7 @@ import json import math import pytest +from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers from requests import Response @@ -79,6 +80,17 @@ def modify_default_endpoint(client): ) +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" @@ -108,6 +120,274 @@ def test__get_default_mtls_endpoint(): ) +def test__read_environment_variables(): + assert DatastoreAdminClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert DatastoreAdminClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert DatastoreAdminClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + DatastoreAdminClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert DatastoreAdminClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert DatastoreAdminClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert DatastoreAdminClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + DatastoreAdminClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable 
`GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert DatastoreAdminClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert DatastoreAdminClient._get_client_cert_source(None, False) is None + assert ( + DatastoreAdminClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + DatastoreAdminClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + DatastoreAdminClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + DatastoreAdminClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + DatastoreAdminClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreAdminClient), +) +@mock.patch.object( + DatastoreAdminAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreAdminAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = DatastoreAdminClient._DEFAULT_UNIVERSE + default_endpoint = DatastoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = DatastoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + DatastoreAdminClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + DatastoreAdminClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == DatastoreAdminClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DatastoreAdminClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + DatastoreAdminClient._get_api_endpoint(None, None, default_universe, "always") + == DatastoreAdminClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DatastoreAdminClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == DatastoreAdminClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DatastoreAdminClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + DatastoreAdminClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + DatastoreAdminClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + DatastoreAdminClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + DatastoreAdminClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + DatastoreAdminClient._get_universe_domain(None, None) + == DatastoreAdminClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + DatastoreAdminClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DatastoreAdminClient, transports.DatastoreAdminGrpcTransport, "grpc"), + (DatastoreAdminClient, transports.DatastoreAdminRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -219,13 +499,13 @@ def test_datastore_admin_client_get_transport_class(): ) @mock.patch.object( DatastoreAdminClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(DatastoreAdminClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreAdminClient), ) @mock.patch.object( DatastoreAdminAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(DatastoreAdminAsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreAdminAsyncClient), ) def test_datastore_admin_client_client_options( client_class, transport_class, transport_name @@ -267,7 +547,9 @@ def test_datastore_admin_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -297,15 +579,23 @@ def test_datastore_admin_client_client_options( # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): + with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): - with pytest.raises(ValueError): + with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") @@ -315,7 +605,9 @@ def test_datastore_admin_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -333,7 +625,9 @@ def test_datastore_admin_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -366,13 +660,13 @@ def test_datastore_admin_client_client_options( ) @mock.patch.object( DatastoreAdminClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(DatastoreAdminClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreAdminClient), ) @mock.patch.object( DatastoreAdminAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(DatastoreAdminAsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreAdminAsyncClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_datastore_admin_client_mtls_env_auto( @@ -395,7 +689,9 @@ def test_datastore_admin_client_mtls_env_auto( if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -427,7 +723,9 @@ def test_datastore_admin_client_mtls_env_auto( return_value=client_cert_source_callback, ): if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -461,7 +759,9 @@ def test_datastore_admin_client_mtls_env_auto( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -551,6 +851,115 @@ def test_datastore_admin_client_get_mtls_endpoint_and_cert_source(client_class): assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [DatastoreAdminClient, DatastoreAdminAsyncClient] +) +@mock.patch.object( + DatastoreAdminClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreAdminClient), +) +@mock.patch.object( + DatastoreAdminAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreAdminAsyncClient), +) +def test_datastore_admin_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = DatastoreAdminClient._DEFAULT_UNIVERSE + default_endpoint = DatastoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = DatastoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -577,7 +986,9 @@ def test_datastore_admin_client_client_options_scopes( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -617,7 +1028,9 @@ def test_datastore_admin_client_client_options_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -677,7 +1090,9 @@ def test_datastore_admin_client_create_channel_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -1838,7 +2253,7 @@ async def test_list_indexes_field_headers_async(): def test_list_indexes_pager(transport_name: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1888,7 +2303,7 @@ def test_list_indexes_pager(transport_name: str = "grpc"): def test_list_indexes_pages(transport_name: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1930,7 +2345,7 @@ def test_list_indexes_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_indexes_async_pager(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1980,7 +2395,7 @@ async def test_list_indexes_async_pager(): @pytest.mark.asyncio async def test_list_indexes_async_pages(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2072,11 +2487,7 @@ def test_export_entities_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -2352,11 +2763,7 @@ def test_import_entities_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -3260,7 +3667,7 @@ def test_credentials_transport_error(): ) # It is an error to provide an api_key and a credential. - options = mock.Mock() + options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = DatastoreAdminClient( @@ -4841,7 +5248,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/tests/unit/gapic/datastore_v1/__init__.py b/tests/unit/gapic/datastore_v1/__init__.py index 89a37dc9..8f6cf068 100644 --- a/tests/unit/gapic/datastore_v1/__init__.py +++ b/tests/unit/gapic/datastore_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/datastore_v1/test_datastore.py b/tests/unit/gapic/datastore_v1/test_datastore.py index 52d8de52..8d427d56 100644 --- a/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/tests/unit/gapic/datastore_v1/test_datastore.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -29,6 +29,7 @@ import json import math import pytest +from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers from requests import Response @@ -51,6 +52,7 @@ from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query +from google.cloud.datastore_v1.types import query_profile from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import struct_pb2 # type: ignore @@ -75,6 +77,17 @@ def modify_default_endpoint(client): ) +# If default endpoint template is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" @@ -99,6 +112,254 @@ def test__get_default_mtls_endpoint(): assert DatastoreClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi +def test__read_environment_variables(): + assert DatastoreClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert DatastoreClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert DatastoreClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + DatastoreClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert DatastoreClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert DatastoreClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert DatastoreClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + DatastoreClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert DatastoreClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert DatastoreClient._get_client_cert_source(None, False) is None + assert ( + DatastoreClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + DatastoreClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + DatastoreClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + DatastoreClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + DatastoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreClient), +) +@mock.patch.object( + DatastoreAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreAsyncClient), +) +def test__get_api_endpoint(): + 
api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = DatastoreClient._DEFAULT_UNIVERSE + default_endpoint = DatastoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = DatastoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + DatastoreClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + DatastoreClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == DatastoreClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DatastoreClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + DatastoreClient._get_api_endpoint(None, None, default_universe, "always") + == DatastoreClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DatastoreClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == DatastoreClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DatastoreClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + DatastoreClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + DatastoreClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + DatastoreClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + DatastoreClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + DatastoreClient._get_universe_domain(None, None) + == DatastoreClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + DatastoreClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DatastoreClient, transports.DatastoreGrpcTransport, "grpc"), + (DatastoreClient, transports.DatastoreRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. 
+ channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -209,12 +470,14 @@ def test_datastore_client_get_transport_class(): ], ) @mock.patch.object( - DatastoreClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatastoreClient) + DatastoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreClient), ) @mock.patch.object( DatastoreAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(DatastoreAsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreAsyncClient), ) def test_datastore_client_client_options(client_class, transport_class, transport_name): # Check that if channel is provided we won't create a new one. 
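[Note: `modify_default_endpoint_template` above swaps in a `test.{UNIVERSE_DOMAIN}` template so plain and mTLS endpoints can be told apart during testing. A minimal sketch of the substitution these hunks rely on; the `datastore.{UNIVERSE_DOMAIN}` shape below is an assumed example, not the template from the library source:

    _DEFAULT_UNIVERSE = "googleapis.com"  # the Google default universe (GDU)
    _DEFAULT_ENDPOINT_TEMPLATE = "datastore.{UNIVERSE_DOMAIN}"  # assumed shape

    def resolve_endpoint(universe_domain=None):
        # With no explicit universe, fall back to the GDU, mirroring the
        # client._DEFAULT_ENDPOINT_TEMPLATE.format(...) calls in the hunks above.
        return _DEFAULT_ENDPOINT_TEMPLATE.format(
            UNIVERSE_DOMAIN=universe_domain or _DEFAULT_UNIVERSE
        )

    assert resolve_endpoint() == "datastore.googleapis.com"
    assert resolve_endpoint("bar.com") == "datastore.bar.com"
]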
@@ -254,7 +517,9 @@ def test_datastore_client_client_options(client_class, transport_class, transpor patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -284,15 +549,23 @@ def test_datastore_client_client_options(client_class, transport_class, transpor # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): + with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): - with pytest.raises(ValueError): + with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") @@ -302,7 +575,9 @@ def test_datastore_client_client_options(client_class, transport_class, transpor patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -320,7 +595,9 @@ def test_datastore_client_client_options(client_class, transport_class, transpor patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -352,12 +629,14 @@ def test_datastore_client_client_options(client_class, transport_class, transpor ], ) @mock.patch.object( - DatastoreClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatastoreClient) + DatastoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreClient), ) @mock.patch.object( DatastoreAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(DatastoreAsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreAsyncClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_datastore_client_mtls_env_auto( @@ -380,7 +659,9 @@ def test_datastore_client_mtls_env_auto( if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -412,7 +693,9 @@ def test_datastore_client_mtls_env_auto( return_value=client_cert_source_callback, ): if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) 
expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -446,7 +729,9 @@ def test_datastore_client_mtls_env_auto( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -532,6 +817,113 @@ def test_datastore_client_get_mtls_endpoint_and_cert_source(client_class): assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [DatastoreClient, DatastoreAsyncClient]) +@mock.patch.object( + DatastoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreClient), +) +@mock.patch.object( + DatastoreAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreAsyncClient), +) +def test_datastore_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = DatastoreClient._DEFAULT_UNIVERSE + default_endpoint = DatastoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = DatastoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -558,7 +950,9 @@ def test_datastore_client_client_options_scopes( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -593,7 +987,9 @@ def test_datastore_client_client_options_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -646,7 +1042,9 @@ def test_datastore_client_create_channel_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -2477,11 +2875,7 @@ def test_lookup_rest_required_fields(request_type=datastore.LookupRequest): request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -2765,11 +3159,7 @@ def test_run_query_rest_required_fields(request_type=datastore.RunQueryRequest): request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, 
- including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -2976,11 +3366,7 @@ def test_run_aggregation_query_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -3189,11 +3575,7 @@ def test_begin_transaction_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -3456,11 +3838,7 @@ def test_commit_rest_required_fields(request_type=datastore.CommitRequest): request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -3743,11 +4121,7 @@ def test_rollback_rest_required_fields(request_type=datastore.RollbackRequest): request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -4017,11 +4391,7 @@ def test_allocate_ids_rest_required_fields(request_type=datastore.AllocateIdsReq request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -4296,11 +4666,7 @@ def test_reserve_ids_rest_required_fields(request_type=datastore.ReserveIdsReque request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -4564,7 +4930,7 @@ def test_credentials_transport_error(): ) # It is an error to provide an api_key and a credential. 
- options = mock.Mock() + options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = DatastoreClient( @@ -6086,7 +6452,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, From b89df729b92606c7e47faa219596f8b4b8c66135 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 22 Mar 2024 04:20:33 -0400 Subject: [PATCH 12/24] chore(python): update dependencies in /.kokoro (#530) Source-Link: https://github.com/googleapis/synthtool/commit/db94845da69ccdfefd7ce55c84e6cfa74829747e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:a8a80fc6456e433df53fc2a0d72ca0345db0ddefb409f1b75b118dfd1babd952 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 +- .kokoro/build.sh | 7 -- .kokoro/docker/docs/Dockerfile | 4 + .kokoro/docker/docs/requirements.in | 1 + .kokoro/docker/docs/requirements.txt | 38 +++++++++ .kokoro/requirements.in | 3 +- .kokoro/requirements.txt | 114 ++++++++++++--------------- 7 files changed, 99 insertions(+), 72 deletions(-) create mode 100644 .kokoro/docker/docs/requirements.in create mode 100644 .kokoro/docker/docs/requirements.txt diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index e4e943e0..4bdeef39 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:98f3afd11308259de6e828e37376d18867fd321aba07826e29e4f8d9cab56bad -# created: 2024-02-27T15:56:18.442440378Z + digest: sha256:a8a80fc6456e433df53fc2a0d72ca0345db0ddefb409f1b75b118dfd1babd952 +# created: 2024-03-15T16:25:47.905264637Z diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 3795bde0..f9800c92 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -33,13 +33,6 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json # Setup project id. export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") -# Remove old nox -python3 -m pip uninstall --yes --quiet nox-automation - -# Install nox -python3 -m pip install --upgrade --quiet nox -python3 -m nox --version - # If this is a continuous build, send the test log to the FlakyBot. # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. 
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index 8e39a2cc..bdaf39fe 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -80,4 +80,8 @@ RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ # Test pip RUN python3 -m pip +# Install build requirements +COPY requirements.txt /requirements.txt +RUN python3 -m pip install --require-hashes -r requirements.txt + CMD ["python3.8"] diff --git a/.kokoro/docker/docs/requirements.in b/.kokoro/docker/docs/requirements.in new file mode 100644 index 00000000..816817c6 --- /dev/null +++ b/.kokoro/docker/docs/requirements.in @@ -0,0 +1 @@ +nox diff --git a/.kokoro/docker/docs/requirements.txt b/.kokoro/docker/docs/requirements.txt new file mode 100644 index 00000000..0e5d70f2 --- /dev/null +++ b/.kokoro/docker/docs/requirements.txt @@ -0,0 +1,38 @@ +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --allow-unsafe --generate-hashes requirements.in +# +argcomplete==3.2.3 \ + --hash=sha256:bf7900329262e481be5a15f56f19736b376df6f82ed27576fa893652c5de6c23 \ + --hash=sha256:c12355e0494c76a2a7b73e3a59b09024ca0ba1e279fb9ed6c1b82d5b74b6a70c + # via nox +colorlog==6.8.2 \ + --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ + --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 + # via nox +distlib==0.3.8 \ + --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ + --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 + # via virtualenv +filelock==3.13.1 \ + --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ + --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c + # via virtualenv +nox==2024.3.2 \ + --hash=sha256:e53514173ac0b98dd47585096a55572fe504fecede58ced708979184d05440be \ + --hash=sha256:f521ae08a15adbf5e11f16cb34e8d0e6ea521e0b92868f684e91677deb974553 + # via -r requirements.in +packaging==24.0 \ + --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \ + --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9 + # via nox +platformdirs==4.2.0 \ + --hash=sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068 \ + --hash=sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768 + # via virtualenv +virtualenv==20.25.1 \ + --hash=sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a \ + --hash=sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197 + # via nox diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in index ec867d9f..fff4d9ce 100644 --- a/.kokoro/requirements.in +++ b/.kokoro/requirements.in @@ -1,5 +1,5 @@ gcp-docuploader -gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x +gcp-releasetool>=2 # required for compatibility with cryptography>=42.x importlib-metadata typing-extensions twine @@ -8,3 +8,4 @@ setuptools nox>=2022.11.21 # required to remove dependency on py charset-normalizer<3 click<8.1.0 +cryptography>=42.0.5 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index bda8e38c..dd61f5f3 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -93,40 +93,41 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==42.0.4 \ - --hash=sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b \ 
- --hash=sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce \ - --hash=sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88 \ - --hash=sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7 \ - --hash=sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20 \ - --hash=sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9 \ - --hash=sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff \ - --hash=sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1 \ - --hash=sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764 \ - --hash=sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b \ - --hash=sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298 \ - --hash=sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1 \ - --hash=sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824 \ - --hash=sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257 \ - --hash=sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a \ - --hash=sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129 \ - --hash=sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb \ - --hash=sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929 \ - --hash=sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854 \ - --hash=sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52 \ - --hash=sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923 \ - --hash=sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885 \ - --hash=sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0 \ - --hash=sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd \ - --hash=sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2 \ - --hash=sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18 \ - --hash=sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b \ - --hash=sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992 \ - --hash=sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74 \ - --hash=sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660 \ - --hash=sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925 \ - --hash=sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449 +cryptography==42.0.5 \ + --hash=sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee \ + --hash=sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576 \ + --hash=sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d \ + --hash=sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30 \ + --hash=sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413 \ + --hash=sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb \ + --hash=sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da \ + --hash=sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4 \ + --hash=sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd \ + --hash=sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc \ + --hash=sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8 \ + 
--hash=sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1 \ + --hash=sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc \ + --hash=sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e \ + --hash=sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8 \ + --hash=sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940 \ + --hash=sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400 \ + --hash=sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7 \ + --hash=sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16 \ + --hash=sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278 \ + --hash=sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74 \ + --hash=sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec \ + --hash=sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1 \ + --hash=sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2 \ + --hash=sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c \ + --hash=sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922 \ + --hash=sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a \ + --hash=sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6 \ + --hash=sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1 \ + --hash=sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e \ + --hash=sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac \ + --hash=sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7 # via + # -r requirements.in # gcp-releasetool # secretstorage distlib==0.3.7 \ @@ -145,9 +146,9 @@ gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==1.16.0 \ - --hash=sha256:27bf19d2e87aaa884096ff941aa3c592c482be3d6a2bfe6f06afafa6af2353e3 \ - --hash=sha256:a316b197a543fd036209d0caba7a8eb4d236d8e65381c80cbc6d7efaa7606d63 +gcp-releasetool==2.0.0 \ + --hash=sha256:3d73480b50ba243f22d7c7ec08b115a30e1c7817c4899781840c26f9c55b8277 \ + --hash=sha256:7aa9fd935ec61e581eb8458ad00823786d91756c25e492f372b2b30962f3c28f # via -r requirements.in google-api-core==2.12.0 \ --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ @@ -392,29 +393,18 @@ platformdirs==3.11.0 \ --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e # via virtualenv -protobuf==3.20.3 \ - --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ - --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ - --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ - --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ - --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ - --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ - --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ - --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ - --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ - 
--hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ - --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ - --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ - --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ - --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ - --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ - --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ - --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ - --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ - --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ - --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ - --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ - --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee +protobuf==4.25.3 \ + --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ + --hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ + --hash=sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c \ + --hash=sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d \ + --hash=sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4 \ + --hash=sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa \ + --hash=sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c \ + --hash=sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019 \ + --hash=sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9 \ + --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ + --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 # via # gcp-docuploader # gcp-releasetool @@ -518,7 +508,7 @@ zipp==3.17.0 \ # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==68.2.2 \ - --hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \ - --hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a +setuptools==69.2.0 \ + --hash=sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e \ + --hash=sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c # via -r requirements.in From f4f3bc7d80dac0eba43d99d5058257d34cc6fb90 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 26 Mar 2024 07:06:47 -0400 Subject: [PATCH 13/24] chore: Update gapic-generator-python to v1.16.1 (#532) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.16.1 PiperOrigin-RevId: 618243632 Source-Link: https://github.com/googleapis/googleapis/commit/078a38bd240827be8e69a5b62993380d1b047994 Source-Link: https://github.com/googleapis/googleapis-gen/commit/7af768c3f8ce58994482350f7401173329950a31 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiN2FmNzY4YzNmOGNlNTg5OTQ0ODIzNTBmNzQwMTE3MzMyOTk1MGEzMSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../test_datastore_admin.py | 332 ++++++++++++- 
.../unit/gapic/datastore_v1/test_datastore.py | 450 +++++++++++++++++- 2 files changed, 754 insertions(+), 28 deletions(-) diff --git a/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index 40adb3a4..c08b309a 100644 --- a/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -1159,7 +1159,8 @@ def test_export_entities(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ExportEntitiesRequest() + request = datastore_admin.ExportEntitiesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1181,6 +1182,54 @@ def test_export_entities_empty_call(): assert args[0] == datastore_admin.ExportEntitiesRequest() +def test_export_entities_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore_admin.ExportEntitiesRequest( + project_id="project_id_value", + output_url_prefix="output_url_prefix_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_entities), "__call__") as call: + client.export_entities(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.ExportEntitiesRequest( + project_id="project_id_value", + output_url_prefix="output_url_prefix_value", + ) + + +@pytest.mark.asyncio +async def test_export_entities_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_entities), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.export_entities() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.ExportEntitiesRequest() + + @pytest.mark.asyncio async def test_export_entities_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ExportEntitiesRequest @@ -1205,7 +1254,8 @@ async def test_export_entities_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ExportEntitiesRequest() + request = datastore_admin.ExportEntitiesRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, future.Future) @@ -1415,7 +1465,8 @@ def test_import_entities(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ImportEntitiesRequest() + request = datastore_admin.ImportEntitiesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1437,6 +1488,54 @@ def test_import_entities_empty_call(): assert args[0] == datastore_admin.ImportEntitiesRequest() +def test_import_entities_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore_admin.ImportEntitiesRequest( + project_id="project_id_value", + input_url="input_url_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_entities), "__call__") as call: + client.import_entities(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.ImportEntitiesRequest( + project_id="project_id_value", + input_url="input_url_value", + ) + + +@pytest.mark.asyncio +async def test_import_entities_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_entities), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.import_entities() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.ImportEntitiesRequest() + + @pytest.mark.asyncio async def test_import_entities_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ImportEntitiesRequest @@ -1461,7 +1560,8 @@ async def test_import_entities_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ImportEntitiesRequest() + request = datastore_admin.ImportEntitiesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1671,7 +1771,8 @@ def test_create_index(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.CreateIndexRequest() + request = datastore_admin.CreateIndexRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, future.Future) @@ -1693,6 +1794,52 @@ def test_create_index_empty_call(): assert args[0] == datastore_admin.CreateIndexRequest() +def test_create_index_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore_admin.CreateIndexRequest( + project_id="project_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_index), "__call__") as call: + client.create_index(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.CreateIndexRequest( + project_id="project_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_index_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_index), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_index() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.CreateIndexRequest() + + @pytest.mark.asyncio async def test_create_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.CreateIndexRequest @@ -1717,7 +1864,8 @@ async def test_create_index_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.CreateIndexRequest() + request = datastore_admin.CreateIndexRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1815,7 +1963,8 @@ def test_delete_index(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.DeleteIndexRequest() + request = datastore_admin.DeleteIndexRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1837,6 +1986,54 @@ def test_delete_index_empty_call(): assert args[0] == datastore_admin.DeleteIndexRequest() +def test_delete_index_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = datastore_admin.DeleteIndexRequest( + project_id="project_id_value", + index_id="index_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_index), "__call__") as call: + client.delete_index(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.DeleteIndexRequest( + project_id="project_id_value", + index_id="index_id_value", + ) + + +@pytest.mark.asyncio +async def test_delete_index_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_index), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_index() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.DeleteIndexRequest() + + @pytest.mark.asyncio async def test_delete_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.DeleteIndexRequest @@ -1861,7 +2058,8 @@ async def test_delete_index_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.DeleteIndexRequest() + request = datastore_admin.DeleteIndexRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1967,7 +2165,8 @@ def test_get_index(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.GetIndexRequest() + request = datastore_admin.GetIndexRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, index.Index) @@ -1994,6 +2193,60 @@ def test_get_index_empty_call(): assert args[0] == datastore_admin.GetIndexRequest() +def test_get_index_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore_admin.GetIndexRequest( + project_id="project_id_value", + index_id="index_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_index), "__call__") as call: + client.get_index(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.GetIndexRequest( + project_id="project_id_value", + index_id="index_id_value", + ) + + +@pytest.mark.asyncio +async def test_get_index_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_index), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + index.Index( + project_id="project_id_value", + index_id="index_id_value", + kind="kind_value", + ancestor=index.Index.AncestorMode.NONE, + state=index.Index.State.CREATING, + ) + ) + response = await client.get_index() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.GetIndexRequest() + + @pytest.mark.asyncio async def test_get_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.GetIndexRequest @@ -2024,7 +2277,8 @@ async def test_get_index_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.GetIndexRequest() + request = datastore_admin.GetIndexRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, index.Index) @@ -2129,7 +2383,8 @@ def test_list_indexes(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ListIndexesRequest() + request = datastore_admin.ListIndexesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListIndexesPager) @@ -2152,6 +2407,58 @@ def test_list_indexes_empty_call(): assert args[0] == datastore_admin.ListIndexesRequest() +def test_list_indexes_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore_admin.ListIndexesRequest( + project_id="project_id_value", + filter="filter_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: + client.list_indexes(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.ListIndexesRequest( + project_id="project_id_value", + filter="filter_value", + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_indexes_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: + # Designate an appropriate return value for the call. 
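+        # FakeUnaryUnaryCall (from google.api_core.grpc_helpers_async) wraps
+        # the response so the mocked RPC can be awaited without a real channel.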
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore_admin.ListIndexesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_indexes() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.ListIndexesRequest() + + @pytest.mark.asyncio async def test_list_indexes_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ListIndexesRequest @@ -2178,7 +2485,8 @@ async def test_list_indexes_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ListIndexesRequest() + request = datastore_admin.ListIndexesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListIndexesAsyncPager) diff --git a/tests/unit/gapic/datastore_v1/test_datastore.py b/tests/unit/gapic/datastore_v1/test_datastore.py index 8d427d56..203d9c3a 100644 --- a/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/tests/unit/gapic/datastore_v1/test_datastore.py @@ -1113,7 +1113,8 @@ def test_lookup(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore.LookupRequest() + request = datastore.LookupRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.LookupResponse) @@ -1136,6 +1137,56 @@ def test_lookup_empty_call(): assert args[0] == datastore.LookupRequest() +def test_lookup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore.LookupRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.lookup), "__call__") as call: + client.lookup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.LookupRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + +@pytest.mark.asyncio +async def test_lookup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.lookup), "__call__") as call: + # Designate an appropriate return value for the call. 
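+        # Only a representative field is populated; the assertion below checks
+        # the request proto, not the contents of the LookupResponse.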
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.LookupResponse( + transaction=b"transaction_blob", + ) + ) + response = await client.lookup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.LookupRequest() + + @pytest.mark.asyncio async def test_lookup_async( transport: str = "grpc_asyncio", request_type=datastore.LookupRequest @@ -1162,7 +1213,8 @@ async def test_lookup_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore.LookupRequest() + request = datastore.LookupRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.LookupResponse) @@ -1377,7 +1429,8 @@ def test_run_query(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RunQueryRequest() + request = datastore.RunQueryRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.RunQueryResponse) @@ -1400,6 +1453,56 @@ def test_run_query_empty_call(): assert args[0] == datastore.RunQueryRequest() +def test_run_query_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore.RunQueryRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_query), "__call__") as call: + client.run_query(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.RunQueryRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + +@pytest.mark.asyncio +async def test_run_query_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_query), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.RunQueryResponse( + transaction=b"transaction_blob", + ) + ) + response = await client.run_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.RunQueryRequest() + + @pytest.mark.asyncio async def test_run_query_async( transport: str = "grpc_asyncio", request_type=datastore.RunQueryRequest @@ -1426,7 +1529,8 @@ async def test_run_query_async( # Establish that the underlying gRPC stub method was called. 
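+        # The test passed an empty request, so the proto sent over the wire
+        # should compare equal to a default-constructed RunQueryRequest.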
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RunQueryRequest() + request = datastore.RunQueryRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.RunQueryResponse) @@ -1509,7 +1613,8 @@ def test_run_aggregation_query(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RunAggregationQueryRequest() + request = datastore.RunAggregationQueryRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.RunAggregationQueryResponse) @@ -1534,6 +1639,60 @@ def test_run_aggregation_query_empty_call(): assert args[0] == datastore.RunAggregationQueryRequest() +def test_run_aggregation_query_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore.RunAggregationQueryRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_aggregation_query), "__call__" + ) as call: + client.run_aggregation_query(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.RunAggregationQueryRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + +@pytest.mark.asyncio +async def test_run_aggregation_query_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_aggregation_query), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.RunAggregationQueryResponse( + transaction=b"transaction_blob", + ) + ) + response = await client.run_aggregation_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.RunAggregationQueryRequest() + + @pytest.mark.asyncio async def test_run_aggregation_query_async( transport: str = "grpc_asyncio", request_type=datastore.RunAggregationQueryRequest @@ -1562,7 +1721,8 @@ async def test_run_aggregation_query_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RunAggregationQueryRequest() + request = datastore.RunAggregationQueryRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
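+    # (i.e. the awaited FakeUnaryUnaryCall resolved to the canned response)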
assert isinstance(response, datastore.RunAggregationQueryResponse) @@ -1649,7 +1809,8 @@ def test_begin_transaction(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore.BeginTransactionRequest() + request = datastore.BeginTransactionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.BeginTransactionResponse) @@ -1674,6 +1835,60 @@ def test_begin_transaction_empty_call(): assert args[0] == datastore.BeginTransactionRequest() +def test_begin_transaction_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore.BeginTransactionRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), "__call__" + ) as call: + client.begin_transaction(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.BeginTransactionRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + +@pytest.mark.asyncio +async def test_begin_transaction_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.BeginTransactionResponse( + transaction=b"transaction_blob", + ) + ) + response = await client.begin_transaction() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.BeginTransactionRequest() + + @pytest.mark.asyncio async def test_begin_transaction_async( transport: str = "grpc_asyncio", request_type=datastore.BeginTransactionRequest @@ -1702,7 +1917,8 @@ async def test_begin_transaction_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore.BeginTransactionRequest() + request = datastore.BeginTransactionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.BeginTransactionResponse) @@ -1873,7 +2089,8 @@ def test_commit(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore.CommitRequest() + request = datastore.CommitRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
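+    # (the canned CommitResponse returned by the mocked stub is passed back
+    # to the caller unchanged)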
assert isinstance(response, datastore.CommitResponse) @@ -1896,6 +2113,56 @@ def test_commit_empty_call(): assert args[0] == datastore.CommitRequest() +def test_commit_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore.CommitRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.commit), "__call__") as call: + client.commit(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.CommitRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + +@pytest.mark.asyncio +async def test_commit_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.commit), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.CommitResponse( + index_updates=1389, + ) + ) + response = await client.commit() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.CommitRequest() + + @pytest.mark.asyncio async def test_commit_async( transport: str = "grpc_asyncio", request_type=datastore.CommitRequest @@ -1922,7 +2189,8 @@ async def test_commit_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore.CommitRequest() + request = datastore.CommitRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.CommitResponse) @@ -2165,7 +2433,8 @@ def test_rollback(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RollbackRequest() + request = datastore.RollbackRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.RollbackResponse) @@ -2187,6 +2456,54 @@ def test_rollback_empty_call(): assert args[0] == datastore.RollbackRequest() +def test_rollback_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
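+    # (per the comment above: the fields set explicitly here are not UUID4
+    # request IDs, so they should reach the transport exactly as given)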
+ request = datastore.RollbackRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rollback), "__call__") as call: + client.rollback(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.RollbackRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + +@pytest.mark.asyncio +async def test_rollback_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rollback), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.RollbackResponse() + ) + response = await client.rollback() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.RollbackRequest() + + @pytest.mark.asyncio async def test_rollback_async( transport: str = "grpc_asyncio", request_type=datastore.RollbackRequest @@ -2211,7 +2528,8 @@ async def test_rollback_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RollbackRequest() + request = datastore.RollbackRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.RollbackResponse) @@ -2381,7 +2699,8 @@ def test_allocate_ids(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore.AllocateIdsRequest() + request = datastore.AllocateIdsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.AllocateIdsResponse) @@ -2403,6 +2722,54 @@ def test_allocate_ids_empty_call(): assert args[0] == datastore.AllocateIdsRequest() +def test_allocate_ids_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore.AllocateIdsRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: + client.allocate_ids(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.AllocateIdsRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + +@pytest.mark.asyncio +async def test_allocate_ids_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
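+    # "Empty" means the client itself must synthesize a default
+    # AllocateIdsRequest before invoking the transport.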
+ client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.AllocateIdsResponse() + ) + response = await client.allocate_ids() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.AllocateIdsRequest() + + @pytest.mark.asyncio async def test_allocate_ids_async( transport: str = "grpc_asyncio", request_type=datastore.AllocateIdsRequest @@ -2427,7 +2794,8 @@ async def test_allocate_ids_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore.AllocateIdsRequest() + request = datastore.AllocateIdsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.AllocateIdsResponse) @@ -2617,7 +2985,8 @@ def test_reserve_ids(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore.ReserveIdsRequest() + request = datastore.ReserveIdsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.ReserveIdsResponse) @@ -2639,6 +3008,54 @@ def test_reserve_ids_empty_call(): assert args[0] == datastore.ReserveIdsRequest() +def test_reserve_ids_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore.ReserveIdsRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: + client.reserve_ids(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.ReserveIdsRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + +@pytest.mark.asyncio +async def test_reserve_ids_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: + # Designate an appropriate return value for the call. 
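+        # ReserveIdsResponse carries no fields, so an empty message is enough
+        # for the awaited fake call.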
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.ReserveIdsResponse() + ) + response = await client.reserve_ids() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.ReserveIdsRequest() + + @pytest.mark.asyncio async def test_reserve_ids_async( transport: str = "grpc_asyncio", request_type=datastore.ReserveIdsRequest @@ -2663,7 +3080,8 @@ async def test_reserve_ids_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore.ReserveIdsRequest() + request = datastore.ReserveIdsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.ReserveIdsResponse) From 43855dd1762f51771bb1a3924c6a234641950fb6 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 4 Apr 2024 19:42:28 +0000 Subject: [PATCH 14/24] feat: add new_transaction support (#499) --- google/cloud/datastore/aggregation.py | 10 +- google/cloud/datastore/batch.py | 27 +++-- google/cloud/datastore/client.py | 26 +++-- google/cloud/datastore/helpers.py | 72 ++++++++----- google/cloud/datastore/query.py | 11 +- google/cloud/datastore/transaction.py | 60 ++++++++++- tests/system/test_transaction.py | 51 +++++++++ tests/unit/test_aggregation.py | 55 +++++++++- tests/unit/test_client.py | 51 ++++++++- tests/unit/test_helpers.py | 85 +++++++++++++++ tests/unit/test_query.py | 47 ++++++++- tests/unit/test_transaction.py | 144 ++++++++++++++++++++++++++ 12 files changed, 580 insertions(+), 59 deletions(-) diff --git a/google/cloud/datastore/aggregation.py b/google/cloud/datastore/aggregation.py index 47ebfebd..1384f332 100644 --- a/google/cloud/datastore/aggregation.py +++ b/google/cloud/datastore/aggregation.py @@ -442,13 +442,11 @@ def _next_page(self): return None query_pb = self._build_protobuf() - transaction = self.client.current_transaction - if transaction is None: - transaction_id = None - else: - transaction_id = transaction.id + transaction_id, new_transaction_options = helpers.get_transaction_options( + self.client.current_transaction + ) read_options = helpers.get_read_options( - self._eventual, transaction_id, self._read_time + self._eventual, transaction_id, self._read_time, new_transaction_options ) partition_id = entity_pb2.PartitionId( diff --git a/google/cloud/datastore/batch.py b/google/cloud/datastore/batch.py index e0dbf26d..69100bc6 100644 --- a/google/cloud/datastore/batch.py +++ b/google/cloud/datastore/batch.py @@ -192,6 +192,19 @@ def mutations(self): """ return self._mutations + def _allow_mutations(self) -> bool: + """ + This method is called to see if the batch is in a proper state to allow + `put` and `delete` operations. + + the Transaction subclass overrides this method to support + the `begin_later` flag. + + :rtype: bool + :returns: True if the batch is in a state to allow mutations. + """ + return self._status == self._IN_PROGRESS + def put(self, entity): """Remember an entity's state to be saved during :meth:`commit`. @@ -218,7 +231,7 @@ def put(self, entity): progress, if entity has no key assigned, or if the key's ``project`` does not match ours. """ - if self._status != self._IN_PROGRESS: + if not self._allow_mutations(): raise ValueError("Batch must be in progress to put()") if entity.key is None: @@ -248,7 +261,7 @@ def delete(self, key): progress, if key is not complete, or if the key's ``project`` does not match ours. 
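+
+        A minimal sketch of typical use (assuming ``client`` is an existing
+        :class:`google.cloud.datastore.Client`):
+
+        .. code-block:: python
+
+            key = client.key("Task", 1234)
+            with client.batch() as batch:
+                batch.delete(key)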
""" - if self._status != self._IN_PROGRESS: + if not self._allow_mutations(): raise ValueError("Batch must be in progress to delete()") if key.is_partial: @@ -370,10 +383,12 @@ def __enter__(self): def __exit__(self, exc_type, exc_val, exc_tb): try: - if exc_type is None: - self.commit() - else: - self.rollback() + # commit or rollback if not in terminal state + if self._status not in (self._ABORTED, self._FINISHED): + if exc_type is None: + self.commit() + else: + self.rollback() finally: self._client._pop_batch() diff --git a/google/cloud/datastore/client.py b/google/cloud/datastore/client.py index 3f5041d6..b1e79d91 100644 --- a/google/cloud/datastore/client.py +++ b/google/cloud/datastore/client.py @@ -122,7 +122,7 @@ def _extended_lookup( missing=None, deferred=None, eventual=False, - transaction_id=None, + transaction=None, retry=None, timeout=None, read_time=None, @@ -158,10 +158,10 @@ def _extended_lookup( consistency. If True, request ``EVENTUAL`` read consistency. - :type transaction_id: str - :param transaction_id: If passed, make the request in the scope of - the given transaction. Incompatible with - ``eventual==True`` or ``read_time``. + :type transaction: Transaction + :param transaction: If passed, make the request in the scope of + the given transaction. Incompatible with + ``eventual==True`` or ``read_time``. :type retry: :class:`google.api_core.retry.Retry` :param retry: @@ -177,7 +177,7 @@ def _extended_lookup( :type read_time: datetime :param read_time: (Optional) Read time to use for read consistency. Incompatible with - ``eventual==True`` or ``transaction_id``. + ``eventual==True`` or ``transaction``. This feature is in private preview. :type database: str @@ -199,8 +199,14 @@ def _extended_lookup( results = [] + transaction_id = None + transaction_id, new_transaction_options = helpers.get_transaction_options( + transaction + ) + read_options = helpers.get_read_options( + eventual, transaction_id, read_time, new_transaction_options + ) loop_num = 0 - read_options = helpers.get_read_options(eventual, transaction_id, read_time) while loop_num < _MAX_LOOPS: # loop against possible deferred. loop_num += 1 request = { @@ -214,6 +220,10 @@ def _extended_lookup( **kwargs, ) + # set new transaction id if we just started a transaction + if transaction and lookup_response.transaction: + transaction._begin_with_id(lookup_response.transaction) + # Accumulate the new results. results.extend(result.entity for result in lookup_response.found) @@ -570,7 +580,7 @@ def get_multi( eventual=eventual, missing=missing, deferred=deferred, - transaction_id=transaction and transaction.id, + transaction=transaction, retry=retry, timeout=timeout, read_time=read_time, diff --git a/google/cloud/datastore/helpers.py b/google/cloud/datastore/helpers.py index e8894883..6eaa3b89 100644 --- a/google/cloud/datastore/helpers.py +++ b/google/cloud/datastore/helpers.py @@ -230,7 +230,9 @@ def entity_to_protobuf(entity): return entity_pb -def get_read_options(eventual, transaction_id, read_time=None): +def get_read_options( + eventual, transaction_id, read_time=None, new_transaction_options=None +): """Validate rules for read options, and assign to the request. Helper method for ``lookup()`` and ``run_query``. @@ -245,33 +247,55 @@ def get_read_options(eventual, transaction_id, read_time=None): :type read_time: datetime :param read_time: Read data from the specified time (may be null). This feature is in private preview. 
+    :type new_transaction_options: :class:`google.cloud.datastore_v1.types.TransactionOptions`
+    :param new_transaction_options: Options for a new transaction.
+
     :rtype: :class:`.datastore_pb2.ReadOptions`
     :returns: The read options corresponding to the inputs.
     :raises: :class:`ValueError` if more than one of ``eventual==True``,
-        ``transaction``, and ``read_time`` is specified.
+        ``transaction_id``, ``read_time``, and ``new_transaction_options`` is specified.
     """
-    if transaction_id is None:
-        if eventual:
-            if read_time is not None:
-                raise ValueError("eventual must be False when read_time is specified")
-            else:
-                return datastore_pb2.ReadOptions(
-                    read_consistency=datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL
-                )
-        else:
-            if read_time is None:
-                return datastore_pb2.ReadOptions()
-            else:
-                read_time_pb = timestamp_pb2.Timestamp()
-                read_time_pb.FromDatetime(read_time)
-                return datastore_pb2.ReadOptions(read_time=read_time_pb)
-    else:
-        if eventual:
-            raise ValueError("eventual must be False when in a transaction")
-        elif read_time is not None:
-            raise ValueError("transaction and read_time are mutually exclusive")
-        else:
-            return datastore_pb2.ReadOptions(transaction=transaction_id)
+    is_set = [
+        bool(x) for x in (eventual, transaction_id, read_time, new_transaction_options)
+    ]
+    if sum(is_set) > 1:
+        raise ValueError(
+            "At most one of eventual, transaction_id, read_time, or "
+            "new_transaction_options is allowed."
+        )
+    new_options = datastore_pb2.ReadOptions()
+    if transaction_id is not None:
+        new_options.transaction = transaction_id
+    if read_time is not None:
+        read_time_pb = timestamp_pb2.Timestamp()
+        read_time_pb.FromDatetime(read_time)
+        new_options.read_time = read_time_pb
+    if new_transaction_options is not None:
+        new_options.new_transaction = new_transaction_options
+    if eventual:
+        new_options.read_consistency = (
+            datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL
+        )
+    return new_options
+
+
+def get_transaction_options(transaction):
+    """
+    Get the ``transaction_id`` or ``new_transaction_options`` field from a
+    transaction object, for use in :func:`get_read_options`.
+
+    These are mutually-exclusive fields, so at least one will be None.
+
+    :type transaction: :class:`google.cloud.datastore.transaction.Transaction` or None
+    :param transaction: The transaction to extract options from, if any.
+
+    :rtype: Tuple[Optional[bytes], Optional[google.cloud.datastore_v1.types.TransactionOptions]]
+    :returns: The transaction_id and new_transaction_options fields from the transaction object.
+    """
+    transaction_id, new_transaction_options = None, None
+    if transaction is not None:
+        if transaction.id is not None:
+            transaction_id = transaction.id
+        elif transaction._begin_later and transaction._status == transaction._INITIAL:
+            # If the transaction has not yet begun, we can use the
+            # new_transaction_options field instead.
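+            # (the backend will begin a transaction on the first request that
+            # carries these options and return the new ID in its response)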
+ new_transaction_options = transaction._options + return transaction_id, new_transaction_options def key_from_protobuf(pb): diff --git a/google/cloud/datastore/query.py b/google/cloud/datastore/query.py index 57c0702c..72d6fe51 100644 --- a/google/cloud/datastore/query.py +++ b/google/cloud/datastore/query.py @@ -778,13 +778,12 @@ def _next_page(self): return None query_pb = self._build_protobuf() - transaction = self.client.current_transaction - if transaction is None: - transaction_id = None - else: - transaction_id = transaction.id + new_transaction_options = None + transaction_id, new_transaction_options = helpers.get_transaction_options( + self.client.current_transaction + ) read_options = helpers.get_read_options( - self._eventual, transaction_id, self._read_time + self._eventual, transaction_id, self._read_time, new_transaction_options ) partition_id = entity_pb2.PartitionId( diff --git a/google/cloud/datastore/transaction.py b/google/cloud/datastore/transaction.py index 3e71ae26..52c17ce2 100644 --- a/google/cloud/datastore/transaction.py +++ b/google/cloud/datastore/transaction.py @@ -13,7 +13,6 @@ # limitations under the License. """Create / interact with Google Cloud Datastore transactions.""" - from google.cloud.datastore.batch import Batch from google.cloud.datastore_v1.types import TransactionOptions from google.protobuf import timestamp_pb2 @@ -149,15 +148,23 @@ class Transaction(Batch): :param read_time: (Optional) Time at which the transaction reads entities. Only allowed when ``read_only=True``. This feature is in private preview. + :type begin_later: bool + :param begin_later: (Optional) If True, the transaction will be started + lazily (i.e. when the first RPC is made). If False, + the transaction will be started as soon as the context manager + is entered. `self.begin()` can also be called manually to begin + the transaction at any time. Default is False. + :raises: :class:`ValueError` if read_time is specified when ``read_only=False``. """ _status = None - def __init__(self, client, read_only=False, read_time=None): + def __init__(self, client, read_only=False, read_time=None, begin_later=False): super(Transaction, self).__init__(client) self._id = None + self._begin_later = begin_later if read_only: if read_time is not None: @@ -180,8 +187,8 @@ def __init__(self, client, read_only=False, read_time=None): def id(self): """Getter for the transaction ID. - :rtype: str - :returns: The ID of the current transaction. + :rtype: bytes or None + :returns: The ID of the current transaction, or None if not started. """ return self._id @@ -240,6 +247,21 @@ def begin(self, retry=None, timeout=None): self._status = self._ABORTED raise + def _begin_with_id(self, transaction_id): + """ + Attach newly created transaction to an existing transaction ID. + + This is used when begin_later is True, when the first lookup request + associated with this transaction creates a new transaction ID. + + :type transaction_id: bytes + :param transaction_id: ID of the transaction to attach to. + """ + if self._status is not self._INITIAL: + raise ValueError("Transaction already begun.") + self._id = transaction_id + self._status = self._IN_PROGRESS + def rollback(self, retry=None, timeout=None): """Rolls back the current transaction. @@ -258,6 +280,12 @@ def rollback(self, retry=None, timeout=None): Note that if ``retry`` is specified, the timeout applies to each individual attempt. 
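+
+        A sketch of explicit use (``client`` and ``entity`` are assumed to
+        exist already):
+
+        .. code-block:: python
+
+            xact = client.transaction()
+            xact.begin()
+            try:
+                xact.put(entity)
+            except Exception:
+                xact.rollback()
+            else:
+                xact.commit()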
""" + # if transaction has not started, abort it + if self._status == self._INITIAL: + self._status = self._ABORTED + self._id = None + return None + kwargs = _make_retry_timeout_kwargs(retry, timeout) try: @@ -296,6 +324,15 @@ def commit(self, retry=None, timeout=None): Note that if ``retry`` is specified, the timeout applies to each individual attempt. """ + # if transaction has not begun, either begin now, or abort if empty + if self._status == self._INITIAL: + if not self._mutations: + self._status = self._ABORTED + self._id = None + return None + else: + self.begin() + kwargs = _make_retry_timeout_kwargs(retry, timeout) try: @@ -321,3 +358,18 @@ def put(self, entity): raise RuntimeError("Transaction is read only") else: super(Transaction, self).put(entity) + + def __enter__(self): + if not self._begin_later: + self.begin() + self._client._push_batch(self) + return self + + def _allow_mutations(self): + """ + Mutations can be added to a transaction if it is in IN_PROGRESS state, + or if it is in INITIAL state and the begin_later flag is set. + """ + return self._status == self._IN_PROGRESS or ( + self._begin_later and self._status == self._INITIAL + ) diff --git a/tests/system/test_transaction.py b/tests/system/test_transaction.py index 6dc9dacd..2f7a6897 100644 --- a/tests/system/test_transaction.py +++ b/tests/system/test_transaction.py @@ -41,6 +41,57 @@ def test_transaction_via_with_statement( assert retrieved_entity == entity +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +@pytest.mark.parametrize("first_call", ["get", "put", "delete"]) +def test_transaction_begin_later( + datastore_client, entities_to_delete, database_id, first_call +): + """ + transactions with begin_later should call begin on first get rpc, or on commit + """ + key = datastore_client.key("Company", "Google") + entity = datastore.Entity(key=key) + entity["url"] = "www.google.com" + + datastore_client.put(entity) + result_entity = datastore_client.get(key) + + with datastore_client.transaction(begin_later=True) as xact: + assert xact._id is None + assert xact._status == xact._INITIAL + if first_call == "get": + datastore_client.get(entity.key) + assert xact._status == xact._IN_PROGRESS + assert xact._id is not None + elif first_call == "put": + xact.put(entity) + assert xact._status == xact._INITIAL + elif first_call == "delete": + xact.delete(result_entity.key) + assert xact._status == xact._INITIAL + assert xact._status == xact._FINISHED + + entities_to_delete.append(result_entity) + + +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +@pytest.mark.parametrize("raise_exception", [True, False]) +def test_transaction_begin_later_noop(datastore_client, database_id, raise_exception): + """ + empty begin later transactions should terminate quietly + """ + try: + with datastore_client.transaction(begin_later=True) as xact: + assert xact._id is None + assert xact._status == xact._INITIAL + if raise_exception: + raise RuntimeError("test") + except RuntimeError: + pass + assert xact._status == xact._ABORTED + assert xact._id is None + + @pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) def test_transaction_via_explicit_begin_get_commit( datastore_client, entities_to_delete, database_id diff --git a/tests/unit/test_aggregation.py b/tests/unit/test_aggregation.py index 15d11aca..8284b808 100644 --- a/tests/unit/test_aggregation.py +++ b/tests/unit/test_aggregation.py @@ -471,7 +471,9 @@ def 
_next_page_helper(txn_id=None, retry=None, timeout=None, database_id=None): if txn_id is None: client = _Client(project, datastore_api=ds_api, database=database_id) else: - transaction = mock.Mock(id=txn_id, spec=["id"]) + transaction = mock.Mock( + id=txn_id, _begin_later=False, spec=["id", "_begin_later"] + ) client = _Client( project, datastore_api=ds_api, transaction=transaction, database=database_id ) @@ -612,6 +614,57 @@ def test_transaction_id_populated(database_id, aggregation_type, aggregation_arg assert read_options.transaction == client.current_transaction.id +@pytest.mark.parametrize("database_id", [None, "somedb"], indirect=True) +@pytest.mark.parametrize( + "aggregation_type,aggregation_args", + [ + ("count", ()), + ( + "sum", + ("appearances",), + ), + ("avg", ("appearances",)), + ], +) +def test_transaction_begin_later(database_id, aggregation_type, aggregation_args): + """ + When an aggregation is run in the context of a transaction with begin_later=True, + the new_transaction field should be populated in the request read_options. + """ + import mock + from google.cloud.datastore_v1.types import TransactionOptions + + # make a fake begin_later transaction + transaction = mock.Mock() + transaction.id = None + transaction._begin_later = True + transaction._status = transaction._INITIAL + transaction._options = TransactionOptions(read_only=TransactionOptions.ReadOnly()) + mock_datastore_api = mock.Mock() + mock_gapic = mock_datastore_api.run_aggregation_query + mock_gapic.return_value = _make_aggregation_query_response([]) + client = _Client( + None, + datastore_api=mock_datastore_api, + database=database_id, + transaction=transaction, + ) + + query = _make_query(client) + aggregation_query = _make_aggregation_query(client=client, query=query) + + # initiate requested aggregation (ex count, sum, avg) + getattr(aggregation_query, aggregation_type)(*aggregation_args) + # run mock query + list(aggregation_query.fetch()) + assert mock_gapic.call_count == 1 + request = mock_gapic.call_args[1]["request"] + read_options = request["read_options"] + # ensure new_transaction is populated + assert not read_options.transaction + assert read_options.new_transaction == transaction._options + + class _Client(object): def __init__( self, diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 412f3923..2b5c01f4 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -705,6 +705,52 @@ def test_client_get_multi_hit_w_transaction(database_id): ds_api.lookup.assert_called_once_with(request=expected_request) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_get_multi_hit_w_transaction_begin_later(database_id): + """ + Transactions with begin_later set should begin on first read + """ + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore.key import Key + + kind = "Kind" + id_ = 1234 + expected_server_id = b"123" + + # Make a found entity pb to be returned from mock backend. + entity_pb = _make_entity_pb(PROJECT, kind, id_, "foo", "Foo", database=database_id) + + # Make a connection to return the entity pb. 
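+    # The canned lookup response below also carries the server-assigned
+    # transaction id, which the client is expected to attach to the
+    # begin_later transaction.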
+ creds = _make_credentials() + client = _make_client(credentials=creds, database=database_id) + lookup_response = _make_lookup_response( + results=[entity_pb], transaction=expected_server_id + ) + ds_api = _make_datastore_api(lookup_response=lookup_response) + client._datastore_api_internal = ds_api + + key = Key(kind, id_, project=PROJECT, database=database_id) + txn = client.transaction(begin_later=True) + assert txn._id is None + assert txn._status == txn._INITIAL + client.get_multi([key], transaction=txn) + + # transaction should now be started + assert txn._id == expected_server_id + assert txn._id is not None + assert txn._status == txn._IN_PROGRESS + + # check rpc args + expected_read_options = datastore_pb2.ReadOptions(new_transaction=txn._options) + expected_request = { + "project_id": PROJECT, + "keys": [key.to_protobuf()], + "read_options": expected_read_options, + } + set_database_id_to_request(expected_request, database_id) + ds_api.lookup.assert_called_once_with(request=expected_request) + + @pytest.mark.parametrize("database_id", [None, "somedb"]) def test_client_get_multi_hit_w_read_time(database_id): from datetime import datetime @@ -1847,7 +1893,7 @@ def _make_commit_response(*keys): return datastore_pb2.CommitResponse(mutation_results=mutation_results) -def _make_lookup_response(results=(), missing=(), deferred=()): +def _make_lookup_response(results=(), missing=(), deferred=(), transaction=None): entity_results_found = [ mock.Mock(entity=result, spec=["entity"]) for result in results ] @@ -1858,7 +1904,8 @@ def _make_lookup_response(results=(), missing=(), deferred=()): found=entity_results_found, missing=entity_results_missing, deferred=deferred, - spec=["found", "missing", "deferred"], + transaction=transaction, + spec=["found", "missing", "deferred", "transaction"], ) diff --git a/tests/unit/test_helpers.py b/tests/unit/test_helpers.py index 89bf6165..38702dba 100644 --- a/tests/unit/test_helpers.py +++ b/tests/unit/test_helpers.py @@ -586,6 +586,91 @@ def test__get_read_options_w_default_wo_txn_w_read_time(): assert read_options == expected +def test__get_read_options_w_new_transaction(): + from google.cloud.datastore.helpers import get_read_options + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + + input_options = datastore_pb2.TransactionOptions() + read_options = get_read_options(False, None, new_transaction_options=input_options) + expected = datastore_pb2.ReadOptions(new_transaction=input_options) + assert read_options == expected + + +@pytest.mark.parametrize( + "args", + [ + (True, "id"), + (True, "id", None), + (True, None, "read_time"), + (True, None, None, "new"), + (False, "id", "read_time"), + (False, "id", None, "new"), + (False, None, "read_time", "new"), + ], +) +def test__get_read_options_w_multiple_args(args): + """ + arguments are mutually exclusive. 
+    Should raise ValueError if multiple are set.
+    """
+    from google.cloud.datastore.helpers import get_read_options
+
+    with pytest.raises(ValueError):
+        get_read_options(*args)
+
+
+def test__get_transaction_options_none():
+    """
+    test with empty transaction input
+    """
+    from google.cloud.datastore.helpers import get_transaction_options
+
+    t_id, new_t = get_transaction_options(None)
+    assert t_id is None
+    assert new_t is None
+
+
+def test__get_transaction_options_w_id():
+    """
+    test with a transaction that has an id set
+    """
+    from google.cloud.datastore.helpers import get_transaction_options
+    from google.cloud.datastore import Transaction
+
+    expected_id = b"123abc"
+    txn = Transaction(None, begin_later=True)
+    txn._id = expected_id
+    t_id, new_t = get_transaction_options(txn)
+    assert t_id == expected_id
+    assert new_t is None
+
+
+def test__get_transaction_options_w_begin_later():
+    """
+    if begin_later is set and the transaction hasn't begun, should return
+    new_transaction_options
+    """
+    from google.cloud.datastore.helpers import get_transaction_options
+    from google.cloud.datastore import Transaction
+
+    txn = Transaction(None, begin_later=True)
+    t_id, new_t = get_transaction_options(txn)
+    assert t_id is None
+    assert new_t is txn._options
+
+
+def test__get_transaction_options_not_started():
+    """
+    If the transaction is not set as begin_later, and it hasn't begun,
+    return None for both
+    """
+    from google.cloud.datastore.helpers import get_transaction_options
+    from google.cloud.datastore import Transaction
+
+    txn = Transaction(None, begin_later=False)
+    t_id, new_t = get_transaction_options(txn)
+    assert t_id is None
+    assert new_t is None
+
+
 def test__pb_attr_value_w_datetime_naive():
     import calendar
     import datetime
diff --git a/tests/unit/test_query.py b/tests/unit/test_query.py
index 84c0bedf..6c2063bb 100644
--- a/tests/unit/test_query.py
+++ b/tests/unit/test_query.py
@@ -667,7 +667,7 @@ def test_eventual_transaction_fails(database_id):
 @pytest.mark.parametrize("database_id", [None, "somedb"])
 def test_transaction_id_populated(database_id):
     """
-    When an aggregation is run in the context of a transaction, the transaction
+    When a query is run in the context of a transaction, the transaction
     ID should be populated in the request.
     """
     import mock
@@ -698,6 +698,47 @@ def test_transaction_id_populated(database_id):
     assert read_options.transaction == client.current_transaction.id
 
 
+@pytest.mark.parametrize("database_id", [None, "somedb"])
+def test_query_transaction_begin_later(database_id):
+    """
+    When a query is run in the context of a transaction with begin_later=True,
+    the new_transaction field should be populated in the request read_options.
+ """ + import mock + from google.cloud.datastore_v1.types import TransactionOptions + + # make a fake begin_later transaction + transaction = mock.Mock() + transaction.id = None + transaction._begin_later = True + transaction._status = transaction._INITIAL + transaction._options = TransactionOptions(read_only=TransactionOptions.ReadOnly()) + + mock_datastore_api = mock.Mock() + mock_gapic = mock_datastore_api.run_query + + more_results_enum = 3 # NO_MORE_RESULTS + response_pb = _make_query_response([], b"", more_results_enum, 0) + mock_gapic.return_value = response_pb + + client = _Client( + None, + datastore_api=mock_datastore_api, + database=database_id, + transaction=transaction, + ) + + query = _make_query(client) + # run mock query + list(query.fetch()) + assert mock_gapic.call_count == 1 + request = mock_gapic.call_args[1]["request"] + read_options = request["read_options"] + # ensure new_transaction is populated + assert not read_options.transaction + assert read_options.new_transaction == transaction._options + + def test_iterator_constructor_defaults(): query = object() client = object() @@ -885,7 +926,9 @@ def _next_page_helper( if txn_id is None: client = _Client(project, database=database, datastore_api=ds_api) else: - transaction = mock.Mock(id=txn_id, spec=["id"]) + transaction = mock.Mock( + id=txn_id, _begin_later=False, spec=["id", "_begin_later"] + ) client = _Client( project, database=database, datastore_api=ds_api, transaction=transaction ) diff --git a/tests/unit/test_transaction.py b/tests/unit/test_transaction.py index 23574ef4..cee384bb 100644 --- a/tests/unit/test_transaction.py +++ b/tests/unit/test_transaction.py @@ -81,6 +81,27 @@ def test_transaction_constructor_read_write_w_read_time(database_id): _make_transaction(client, read_only=False, read_time=read_time) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_constructor_begin_later(database_id): + from google.cloud.datastore.transaction import Transaction + + project = "PROJECT" + client = _Client(project, database=database_id) + expected_id = b"1234" + + xact = _make_transaction(client, begin_later=True) + assert xact._status == Transaction._INITIAL + assert xact.id is None + + xact._begin_with_id(expected_id) + assert xact._status == Transaction._IN_PROGRESS + assert xact.id == expected_id + + # calling a second time should raise exeception + with pytest.raises(ValueError): + xact._begin_with_id(expected_id) + + @pytest.mark.parametrize("database_id", [None, "somedb"]) def test_transaction_current(database_id): from google.cloud.datastore_v1.types import datastore as datastore_pb2 @@ -375,6 +396,7 @@ def test_transaction_context_manager_no_raise(database_id): xact = _make_transaction(client) with xact: + assert xact._status == xact._IN_PROGRESS # only set between begin / commit assert xact.id == id_ @@ -427,6 +449,34 @@ class Foo(Exception): client._datastore_api.rollback.assert_called_once_with(request=expected_request) +@pytest.mark.parametrize("with_exception", [False, True]) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_context_manager_w_begin_later(database_id, with_exception): + """ + If begin_later is set, don't begin transaction when entering context manager + """ + project = "PROJECT" + id_ = 912830 + ds_api = _make_datastore_api(xact_id=id_) + client = _Client(project, datastore_api=ds_api, database=database_id) + xact = _make_transaction(client, begin_later=True) + + try: + with xact: + assert xact._status == xact._INITIAL + 
+            assert xact.id is None
+            if with_exception:
+                raise RuntimeError("expected")
+    except RuntimeError:
+        pass
+    # should be finalized after context manager block
+    assert xact._status == xact._ABORTED
+    assert xact.id is None
+    # no need to call commit or rollback
+    assert ds_api.commit.call_count == 0
+    assert ds_api.rollback.call_count == 0
+
+
 @pytest.mark.parametrize("database_id", [None, "somedb"])
 def test_transaction_put_read_only(database_id):
     project = "PROJECT"
@@ -441,6 +491,100 @@ def test_transaction_put_read_only(database_id):
         xact.put(entity)
 
 
+@pytest.mark.parametrize("database_id", [None, "somedb"])
+def test_transaction_put_w_begin_later(database_id):
+    """
+    If begin_later is set, should be able to call put without begin first
+    """
+    project = "PROJECT"
+    id_ = 943243
+    ds_api = _make_datastore_api(xact_id=id_)
+    client = _Client(project, datastore_api=ds_api, database=database_id)
+    entity = _Entity(database=database_id)
+    with _make_transaction(client, begin_later=True) as xact:
+        assert xact._status == xact._INITIAL
+        assert len(xact.mutations) == 0
+        xact.put(entity)
+        assert len(xact.mutations) == 1
+        # should still be in initial state
+        assert xact._status == xact._INITIAL
+
+
+@pytest.mark.parametrize("database_id", [None, "somedb"])
+def test_transaction_delete_w_begin_later(database_id):
+    """
+    If begin_later is set, should be able to call delete without begin first
+    """
+    project = "PROJECT"
+    id_ = 943243
+    ds_api = _make_datastore_api(xact_id=id_)
+    client = _Client(project, datastore_api=ds_api, database=database_id)
+    entity = _Entity(database=database_id)
+    with _make_transaction(client, begin_later=True) as xact:
+        assert xact._status == xact._INITIAL
+        assert len(xact.mutations) == 0
+        xact.delete(entity.key.completed_key("name"))
+        assert len(xact.mutations) == 1
+        # should still be in initial state
+        assert xact._status == xact._INITIAL
+
+
+@pytest.mark.parametrize("database_id", [None, "somedb"])
+def test_transaction_rollback_no_begin(database_id):
+    """
+    If rollback is called without begin, the transaction should abort
+    """
+    project = "PROJECT"
+    id_ = 943243
+    ds_api = _make_datastore_api(xact_id=id_)
+    client = _Client(project, datastore_api=ds_api, database=database_id)
+    with _make_transaction(client, begin_later=True) as xact:
+        assert xact._status == xact._INITIAL
+        with mock.patch.object(xact, "begin") as begin:
+            xact.rollback()
+            begin.assert_not_called()
+            assert xact._status == xact._ABORTED
+
+
+@pytest.mark.parametrize("database_id", [None, "somedb"])
+def test_transaction_commit_no_begin(database_id):
+    """
+    If commit is called without begin, and it has mutations staged,
+    should call begin before commit
+    """
+    project = "PROJECT"
+    id_ = 943243
+    ds_api = _make_datastore_api(xact_id=id_)
+    client = _Client(project, datastore_api=ds_api, database=database_id)
+    entity = _Entity(database=database_id)
+    with _make_transaction(client, begin_later=True) as xact:
+        assert xact._status == xact._INITIAL
+        xact.put(entity)
+        assert xact._status == xact._INITIAL
+        with mock.patch.object(xact, "begin") as begin:
+            begin.side_effect = lambda: setattr(xact, "_status", xact._IN_PROGRESS)
+            xact.commit()
+            begin.assert_called_once_with()
+
+
+@pytest.mark.parametrize("database_id", [None, "somedb"])
+def test_empty_transaction_commit(database_id):
+    """
+    If commit is called without begin, and it has no mutations staged,
+    should abort
+    """
+    project = "PROJECT"
+    id_ = 943243
+    ds_api = _make_datastore_api(xact_id=id_)
+    client = _Client(project,
datastore_api=ds_api, database=database_id) + with _make_transaction(client, begin_later=True) as xact: + assert xact._status == xact._INITIAL + with mock.patch.object(xact, "begin") as begin: + xact.commit() + begin.assert_not_called() + assert xact._status == xact._ABORTED + + def _make_key(kind, id_, project, database=None): from google.cloud.datastore_v1.types import entity as entity_pb2 From c86b51e7e56ad52c6ee9ca3c9de2db5553e4ec0a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 12 Apr 2024 17:39:27 -0400 Subject: [PATCH 15/24] chore(python): bump idna from 3.4 to 3.7 in .kokoro (#537) * chore(python): bump idna from 3.4 to 3.7 in .kokoro Source-Link: https://github.com/googleapis/synthtool/commit/d50980e704793a2d3310bfb3664f3a82f24b5796 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:5a4c19d17e597b92d786e569be101e636c9c2817731f80a5adec56b2aa8fe070 * Apply changes from googleapis/synthtool#1950 --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 4 ++-- .github/auto-label.yaml | 5 +++++ .github/blunderbuss.yml | 23 +++++++++++++++++++++++ .kokoro/requirements.txt | 6 +++--- README.rst | 2 +- docs/index.rst | 5 +++++ docs/summary_overview.md | 22 ++++++++++++++++++++++ 7 files changed, 61 insertions(+), 6 deletions(-) create mode 100644 .github/blunderbuss.yml create mode 100644 docs/summary_overview.md diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 4bdeef39..81f87c56 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:a8a80fc6456e433df53fc2a0d72ca0345db0ddefb409f1b75b118dfd1babd952 -# created: 2024-03-15T16:25:47.905264637Z + digest: sha256:5a4c19d17e597b92d786e569be101e636c9c2817731f80a5adec56b2aa8fe070 +# created: 2024-04-12T11:35:58.922854369Z diff --git a/.github/auto-label.yaml b/.github/auto-label.yaml index b2016d11..8b37ee89 100644 --- a/.github/auto-label.yaml +++ b/.github/auto-label.yaml @@ -13,3 +13,8 @@ # limitations under the License. requestsize: enabled: true + +path: + pullrequest: true + paths: + samples: "samples" diff --git a/.github/blunderbuss.yml b/.github/blunderbuss.yml new file mode 100644 index 00000000..54156a1b --- /dev/null +++ b/.github/blunderbuss.yml @@ -0,0 +1,23 @@ +# Blunderbuss config +# +# This file controls who is assigned for pull requests and issues. +# Note: This file is autogenerated. To make changes to the assignee +# team, please update `codeowner_team` in `.repo-metadata.json`. 
+assign_issues: + - googleapis/cloud-native-db-dpes + - googleapis/api-datastore-sdk + - googleapis/api-firestore-partners + +assign_issues_by: + - labels: + - "samples" + to: + - googleapis/python-samples-reviewers + - googleapis/cloud-native-db-dpes + - googleapis/api-datastore-sdk + - googleapis/api-firestore-partners + +assign_prs: + - googleapis/cloud-native-db-dpes + - googleapis/api-datastore-sdk + - googleapis/api-firestore-partners diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index dd61f5f3..51f92b8e 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -252,9 +252,9 @@ googleapis-common-protos==1.61.0 \ --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b # via google-api-core -idna==3.4 \ - --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ - --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 +idna==3.7 \ + --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ + --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 # via requests importlib-metadata==6.8.0 \ --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ diff --git a/README.rst b/README.rst index 0d5b7f26..c5f944dc 100644 --- a/README.rst +++ b/README.rst @@ -19,7 +19,7 @@ consistency for all other queries. .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-datastore.svg :target: https://pypi.org/project/google-cloud-datastore/ .. _Google Cloud Datastore API: https://cloud.google.com/datastore -.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/datastore/latest +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/datastore/latest/summary_overview .. _Product Documentation: https://cloud.google.com/datastore Quick Start diff --git a/docs/index.rst b/docs/index.rst index 890ec56a..abf8561b 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -43,3 +43,8 @@ For a list of all ``google-cloud-datastore`` releases: :maxdepth: 2 changelog + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/docs/summary_overview.md b/docs/summary_overview.md new file mode 100644 index 00000000..2473abe7 --- /dev/null +++ b/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# Google Cloud Datastore API + +Overview of the APIs available for Google Cloud Datastore API. + +## All entries + +Classes, methods and properties & attributes for +Google Cloud Datastore API. 
+ +[classes](https://cloud.google.com/python/docs/reference/datastore/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/datastore/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/datastore/latest/summary_property.html) From 7fd218b2afc0282d8fea21992e8d10c5eec72ac7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 8 Jul 2024 09:36:02 -0700 Subject: [PATCH 16/24] feat(spanner): Add support for Cloud Spanner Scheduled Backups (#540) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.18.0 PiperOrigin-RevId: 638650618 Source-Link: https://github.com/googleapis/googleapis/commit/6330f0389afdd04235c59898cc44f715b077aa25 Source-Link: https://github.com/googleapis/googleapis-gen/commit/44fa4f1979dc45c1778fd7caf13f8e61c6d1cae8 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDRmYTRmMTk3OWRjNDVjMTc3OGZkN2NhZjEzZjhlNjFjNmQxY2FlOCJ9 feat: New PropertyMask field which allows partial commits, lookups, and query results PiperOrigin-RevId: 635449160 Source-Link: https://github.com/googleapis/googleapis/commit/dde0ec1f36cb8cbf9036dd0f1e8e5eda7882db4e Source-Link: https://github.com/googleapis/googleapis-gen/commit/8caa60d9aea82964a19cdf8faf91384911db8bdd Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOGNhYTYwZDlhZWE4Mjk2NGExOWNkZjhmYWY5MTM4NDkxMWRiOGJkZCJ9 chore: Update gapic-generator-python to v1.17.1 PiperOrigin-RevId: 629071173 Source-Link: https://github.com/googleapis/googleapis/commit/4afa392105cc62e965631d15b772ff68454ecf1c Source-Link: https://github.com/googleapis/googleapis-gen/commit/16dbbb4d0457db5e61ac9f99b0d52a46154455ac Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTZkYmJiNGQwNDU3ZGI1ZTYxYWM5Zjk5YjBkNTJhNDYxNTQ0NTVhYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat(spanner): Add support for Cloud Spanner Scheduled Backups PiperOrigin-RevId: 649277844 Source-Link: https://github.com/googleapis/googleapis/commit/fd7efa2da3860e813485e63661d3bdd21fc9ba82 Source-Link: https://github.com/googleapis/googleapis-gen/commit/50be251329d8db5b555626ebd4886721f547d3cc Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTBiZTI1MTMyOWQ4ZGI1YjU1NTYyNmViZDQ4ODY3MjFmNTQ3ZDNjYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../services/datastore_admin/async_client.py | 120 ++- .../services/datastore_admin/client.py | 68 +- .../datastore_admin/transports/base.py | 4 +- .../datastore_admin/transports/grpc.py | 28 +- .../transports/grpc_asyncio.py | 84 +- google/cloud/datastore_v1/__init__.py | 2 + .../services/datastore/async_client.py | 176 ++-- .../datastore_v1/services/datastore/client.py | 96 +- .../services/datastore/transports/base.py | 4 +- .../services/datastore/transports/grpc.py | 28 +- .../datastore/transports/grpc_asyncio.py | 114 ++- google/cloud/datastore_v1/types/__init__.py | 2 + google/cloud/datastore_v1/types/datastore.py | 66 ++ scripts/fixup_datastore_v1_keywords.py | 4 +- .../test_datastore_admin.py | 760 +++++++++++++- .../unit/gapic/datastore_v1/test_datastore.py | 950 ++++++++++++++++++ 16 files changed, 2203 insertions(+), 303 deletions(-) diff --git 
a/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index b564e8c1..4b7b0c8d 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -37,6 +38,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -250,7 +252,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, DatastoreAdminTransport] = "grpc_asyncio", + transport: Optional[ + Union[str, DatastoreAdminTransport, Callable[..., DatastoreAdminTransport]] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -262,9 +266,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.DatastoreAdminTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,DatastoreAdminTransport,Callable[..., DatastoreAdminTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DatastoreAdminTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -429,8 +435,8 @@ async def sample_export_entities(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project_id, labels, entity_filter, output_url_prefix] ) @@ -440,7 +446,10 @@ async def sample_export_entities(): "the individual field arguments should be set." ) - request = datastore_admin.ExportEntitiesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datastore_admin.ExportEntitiesRequest): + request = datastore_admin.ExportEntitiesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -456,11 +465,9 @@ async def sample_export_entities(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.export_entities, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.export_entities + ] # Certain fields should be provided within the metadata header; # add these here. @@ -613,8 +620,8 @@ async def sample_import_entities(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, labels, input_url, entity_filter]) if request is not None and has_flattened_params: raise ValueError( @@ -622,7 +629,10 @@ async def sample_import_entities(): "the individual field arguments should be set." ) - request = datastore_admin.ImportEntitiesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datastore_admin.ImportEntitiesRequest): + request = datastore_admin.ImportEntitiesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -638,11 +648,9 @@ async def sample_import_entities(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.import_entities, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.import_entities + ] # Certain fields should be provided within the metadata header; # add these here. @@ -747,15 +755,16 @@ async def sample_create_index(): """ # Create or coerce a protobuf request object. - request = datastore_admin.CreateIndexRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datastore_admin.CreateIndexRequest): + request = datastore_admin.CreateIndexRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_index, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_index + ] # Certain fields should be provided within the metadata header; # add these here. @@ -859,15 +868,16 @@ async def sample_delete_index(): """ # Create or coerce a protobuf request object. - request = datastore_admin.DeleteIndexRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datastore_admin.DeleteIndexRequest): + request = datastore_admin.DeleteIndexRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_index, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_index + ] # Certain fields should be provided within the metadata header; # add these here. @@ -952,25 +962,16 @@ async def sample_get_index(): Datastore composite index definition. """ # Create or coerce a protobuf request object. - request = datastore_admin.GetIndexRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, datastore_admin.GetIndexRequest): + request = datastore_admin.GetIndexRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_index, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_index + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1056,25 +1057,16 @@ async def sample_list_indexes(): """ # Create or coerce a protobuf request object. - request = datastore_admin.ListIndexesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datastore_admin.ListIndexesRequest): + request = datastore_admin.ListIndexesRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_indexes, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_indexes + ] # Certain fields should be provided within the metadata header; # add these here. diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index de174f58..e6f35ba3 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -565,7 +566,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DatastoreAdminTransport]] = None, + transport: Optional[ + Union[str, DatastoreAdminTransport, Callable[..., DatastoreAdminTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -577,9 +580,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, DatastoreAdminTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,DatastoreAdminTransport,Callable[..., DatastoreAdminTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DatastoreAdminTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. 
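For illustration, the new Callable form of the ``transport`` argument described in the docstring above can be exercised as follows. This is a minimal sketch, not part of the patch: it assumes the released package exposes ``DatastoreAdminGrpcTransport`` from the ``transports`` subpackage, and the factory is a placeholder.

    from google.cloud.datastore_admin_v1 import DatastoreAdminClient
    from google.cloud.datastore_admin_v1.services.datastore_admin.transports import (
        DatastoreAdminGrpcTransport,
    )

    def transport_factory(**kwargs):
        # Called with the same initialization arguments the
        # DatastoreAdminTransport constructor would receive
        # (credentials, host, scopes, client_info, ...).
        return DatastoreAdminGrpcTransport(**kwargs)

    client = DatastoreAdminClient(transport=transport_factory)

The factory form lets callers customize transport construction (for example, to wrap the channel) while still letting the client compute endpoints and credentials.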
@@ -688,8 +693,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[DatastoreAdminTransport], Callable[..., DatastoreAdminTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DatastoreAdminTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -822,8 +834,8 @@ def sample_export_entities(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project_id, labels, entity_filter, output_url_prefix] ) @@ -833,10 +845,8 @@ def sample_export_entities(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a datastore_admin.ExportEntitiesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore_admin.ExportEntitiesRequest): request = datastore_admin.ExportEntitiesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1005,8 +1015,8 @@ def sample_import_entities(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, labels, input_url, entity_filter]) if request is not None and has_flattened_params: raise ValueError( @@ -1014,10 +1024,8 @@ def sample_import_entities(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a datastore_admin.ImportEntitiesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore_admin.ImportEntitiesRequest): request = datastore_admin.ImportEntitiesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1138,10 +1146,8 @@ def sample_create_index(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a datastore_admin.CreateIndexRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore_admin.CreateIndexRequest): request = datastore_admin.CreateIndexRequest(request) @@ -1251,10 +1257,8 @@ def sample_delete_index(): """ # Create or coerce a protobuf request object. 
- # Minor optimization to avoid making a copy if the user passes - # in a datastore_admin.DeleteIndexRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore_admin.DeleteIndexRequest): request = datastore_admin.DeleteIndexRequest(request) @@ -1345,10 +1349,8 @@ def sample_get_index(): Datastore composite index definition. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a datastore_admin.GetIndexRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore_admin.GetIndexRequest): request = datastore_admin.GetIndexRequest(request) @@ -1440,10 +1442,8 @@ def sample_list_indexes(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a datastore_admin.ListIndexesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore_admin.ListIndexesRequest): request = datastore_admin.ListIndexesRequest(request) diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index bddab490..8c3a00f3 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -88,6 +88,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. 
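Taken together, the hunk above (which introduces ``_ignore_credentials``) and the hunk below (which consults it) amount to the credential-resolution order sketched here. This is a simplified illustration, not the literal library code; ``load_from_file`` and ``application_default`` stand in for the corresponding ``google.auth`` helpers.

    def resolve_credentials(credentials, credentials_file, ignore_credentials):
        # 1. an explicit credentials file always wins
        if credentials_file:
            return load_from_file(credentials_file)
        # 2. otherwise fall back to Application Default Credentials, unless a
        #    channel instance was passed (the transport then sets
        #    _ignore_credentials = True and skips this lookup)
        if credentials is None and not ignore_credentials:
            return application_default()
        # 3. otherwise use what was given, which may legitimately be None
        return credentials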
@@ -100,7 +102,7 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index 68867594..4d08d9c4 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -106,7 +106,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -126,14 +126,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -143,11 +146,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
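The same pattern applies one layer down: ``channel`` may now be a factory instead of a ``grpc.Channel`` instance. A minimal sketch under the assumption that these import paths match the released package; the insecure channel is purely illustrative and would not authenticate against the real endpoint.

    import grpc

    from google.cloud.datastore_admin_v1.services.datastore_admin.transports import (
        DatastoreAdminGrpcTransport,
    )

    def channel_factory(host, **kwargs):
        # Invoked with the same arguments as
        # DatastoreAdminGrpcTransport.create_channel (host positionally,
        # then credentials=, scopes=, quota_project_id=, ...).
        return grpc.insecure_channel(host)  # placeholder channel

    transport = DatastoreAdminGrpcTransport(channel=channel_factory)

Note that, per the ``isinstance`` checks in these hunks, credentials are only ignored when a channel *instance* is passed; with a factory, default credentials are still resolved and forwarded to it.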
client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -174,9 +177,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -215,7 +219,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py index 367a5ab6..7526fc5c 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py @@ -18,6 +18,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -121,7 +123,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -151,7 +152,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -171,15 +172,18 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. 
If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -189,11 +193,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -220,9 +224,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -260,7 +265,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -517,6 +524,61 @@ def list_indexes( ) return self._stubs["list_indexes"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.export_entities: gapic_v1.method_async.wrap_method( + self.export_entities, + default_timeout=60.0, + client_info=client_info, + ), + self.import_entities: gapic_v1.method_async.wrap_method( + self.import_entities, + default_timeout=60.0, + client_info=client_info, + ), + self.create_index: gapic_v1.method_async.wrap_method( + self.create_index, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_index: gapic_v1.method_async.wrap_method( + self.delete_index, + default_timeout=60.0, + client_info=client_info, + ), + self.get_index: gapic_v1.method_async.wrap_method( + self.get_index, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_indexes: gapic_v1.method_async.wrap_method( + self.list_indexes, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git 
a/google/cloud/datastore_v1/__init__.py b/google/cloud/datastore_v1/__init__.py index 8c9d09fe..b1855aff 100644 --- a/google/cloud/datastore_v1/__init__.py +++ b/google/cloud/datastore_v1/__init__.py @@ -33,6 +33,7 @@ from .types.datastore import LookupResponse from .types.datastore import Mutation from .types.datastore import MutationResult +from .types.datastore import PropertyMask from .types.datastore import ReadOptions from .types.datastore import ReserveIdsRequest from .types.datastore import ReserveIdsResponse @@ -98,6 +99,7 @@ "PlanSummary", "Projection", "PropertyFilter", + "PropertyMask", "PropertyOrder", "PropertyReference", "Query", diff --git a/google/cloud/datastore_v1/services/datastore/async_client.py b/google/cloud/datastore_v1/services/datastore/async_client.py index e911a362..d6c347f6 100644 --- a/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/google/cloud/datastore_v1/services/datastore/async_client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -37,6 +38,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -197,7 +199,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, DatastoreTransport] = "grpc_asyncio", + transport: Optional[ + Union[str, DatastoreTransport, Callable[..., DatastoreTransport]] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -209,9 +213,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.DatastoreTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,DatastoreTransport,Callable[..., DatastoreTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DatastoreTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -330,8 +336,8 @@ async def sample_lookup(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, read_options, keys]) if request is not None and has_flattened_params: raise ValueError( @@ -339,7 +345,10 @@ async def sample_lookup(): "the individual field arguments should be set." ) - request = datastore.LookupRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datastore.LookupRequest): + request = datastore.LookupRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
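The ``PropertyMask`` type added to the v1 exports above enables the partial lookups, commits, and query results named in the commit message. A hedged sketch of a property-masked lookup follows; the field names ``paths`` and ``property_mask``, and the project id, are assumptions to verify against the released types.

    from google.cloud import datastore_v1

    client = datastore_v1.DatastoreClient()
    key = datastore_v1.Key(
        partition_id=datastore_v1.PartitionId(project_id="my-project"),
        path=[datastore_v1.Key.PathElement(kind="Task", name="task-1")],
    )
    response = client.lookup(
        request=datastore_v1.LookupRequest(
            project_id="my-project",  # placeholder project
            keys=[key],
            # only the named properties come back on the found entities
            property_mask=datastore_v1.PropertyMask(paths=["title"]),
        )
    )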
@@ -352,21 +361,7 @@ async def sample_lookup(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.lookup, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.lookup] # Certain fields should be provided within the metadata header; # add these here. @@ -443,25 +438,16 @@ async def sample_run_query(): """ # Create or coerce a protobuf request object. - request = datastore.RunQueryRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datastore.RunQueryRequest): + request = datastore.RunQueryRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.run_query, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.run_query + ] # Certain fields should be provided within the metadata header; # add these here. @@ -538,25 +524,16 @@ async def sample_run_aggregation_query(): """ # Create or coerce a protobuf request object. - request = datastore.RunAggregationQueryRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datastore.RunAggregationQueryRequest): + request = datastore.RunAggregationQueryRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.run_aggregation_query, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.run_aggregation_query + ] # Certain fields should be provided within the metadata header; # add these here. @@ -641,8 +618,8 @@ async def sample_begin_transaction(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id]) if request is not None and has_flattened_params: raise ValueError( @@ -650,7 +627,10 @@ async def sample_begin_transaction(): "the individual field arguments should be set." ) - request = datastore.BeginTransactionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, datastore.BeginTransactionRequest): + request = datastore.BeginTransactionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -659,11 +639,9 @@ async def sample_begin_transaction(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.begin_transaction, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.begin_transaction + ] # Certain fields should be provided within the metadata header; # add these here. @@ -788,8 +766,8 @@ async def sample_commit(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, mode, transaction, mutations]) if request is not None and has_flattened_params: raise ValueError( @@ -797,7 +775,10 @@ async def sample_commit(): "the individual field arguments should be set." ) - request = datastore.CommitRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datastore.CommitRequest): + request = datastore.CommitRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -812,11 +793,7 @@ async def sample_commit(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.commit, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.commit] # Certain fields should be provided within the metadata header; # add these here. @@ -912,8 +889,8 @@ async def sample_rollback(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, transaction]) if request is not None and has_flattened_params: raise ValueError( @@ -921,7 +898,10 @@ async def sample_rollback(): "the individual field arguments should be set." ) - request = datastore.RollbackRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datastore.RollbackRequest): + request = datastore.RollbackRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -932,11 +912,7 @@ async def sample_rollback(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.rollback, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.rollback] # Certain fields should be provided within the metadata header; # add these here. 
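The recurring ``isinstance`` coercion in these hunks means callers may pass either a plain dict or an already-constructed request object; after this change only the dict form incurs a conversion. For example (placeholder project id):

    from google.cloud import datastore_v1

    client = datastore_v1.DatastoreClient()

    # dict form: coerced into a BeginTransactionRequest internally
    response = client.begin_transaction(request={"project_id": "my-project"})

    # proto form: used as-is, since there are no flattened fields to merge
    request = datastore_v1.BeginTransactionRequest(project_id="my-project")
    response = client.begin_transaction(request=request)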
@@ -1032,8 +1008,8 @@ async def sample_allocate_ids(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, keys]) if request is not None and has_flattened_params: raise ValueError( @@ -1041,7 +1017,10 @@ async def sample_allocate_ids(): "the individual field arguments should be set." ) - request = datastore.AllocateIdsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datastore.AllocateIdsRequest): + request = datastore.AllocateIdsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1052,11 +1031,9 @@ async def sample_allocate_ids(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.allocate_ids, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.allocate_ids + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1151,8 +1128,8 @@ async def sample_reserve_ids(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, keys]) if request is not None and has_flattened_params: raise ValueError( @@ -1160,7 +1137,10 @@ async def sample_reserve_ids(): "the individual field arguments should be set." ) - request = datastore.ReserveIdsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datastore.ReserveIdsRequest): + request = datastore.ReserveIdsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1171,21 +1151,9 @@ async def sample_reserve_ids(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.reserve_ids, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.reserve_ids + ] # Certain fields should be provided within the metadata header; # add these here. 
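The retry defaults that these hunks relocate into the transport's ``_prep_wrapped_methods`` (initial=0.1, multiplier=1.3, maximum=60.0, deadline=60.0) imply the delay schedule computed below. The real ``AsyncRetry`` additionally applies jitter, so these values are upper bounds on each sleep.

    delay, elapsed, schedule = 0.1, 0.0, []
    while elapsed < 60.0 and len(schedule) < 10:
        step = min(delay, 60.0)   # each sleep is capped at `maximum`
        schedule.append(round(step, 3))
        elapsed += step
        delay *= 1.3              # exponential growth by `multiplier`
    print(schedule)
    # [0.1, 0.13, 0.169, 0.22, 0.286, 0.371, 0.483, 0.627, 0.816, 1.06]

Because ``deadline=60.0``, retries stop once a minute has elapsed regardless of how many attempts remain.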
diff --git a/google/cloud/datastore_v1/services/datastore/client.py b/google/cloud/datastore_v1/services/datastore/client.py index 0a498175..6c3cb802 100644 --- a/google/cloud/datastore_v1/services/datastore/client.py +++ b/google/cloud/datastore_v1/services/datastore/client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -516,7 +517,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DatastoreTransport]] = None, + transport: Optional[ + Union[str, DatastoreTransport, Callable[..., DatastoreTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -528,9 +531,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, DatastoreTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,DatastoreTransport,Callable[..., DatastoreTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DatastoreTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -636,8 +641,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[DatastoreTransport], Callable[..., DatastoreTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DatastoreTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -724,8 +736,8 @@ def sample_lookup(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, read_options, keys]) if request is not None and has_flattened_params: raise ValueError( @@ -733,10 +745,8 @@ def sample_lookup(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a datastore.LookupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore.LookupRequest): request = datastore.LookupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -832,10 +842,8 @@ def sample_run_query(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a datastore.RunQueryRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore.RunQueryRequest): request = datastore.RunQueryRequest(request) @@ -923,10 +931,8 @@ def sample_run_aggregation_query(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a datastore.RunAggregationQueryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore.RunAggregationQueryRequest): request = datastore.RunAggregationQueryRequest(request) @@ -1022,8 +1028,8 @@ def sample_begin_transaction(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id]) if request is not None and has_flattened_params: raise ValueError( @@ -1031,10 +1037,8 @@ def sample_begin_transaction(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a datastore.BeginTransactionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore.BeginTransactionRequest): request = datastore.BeginTransactionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1174,8 +1178,8 @@ def sample_commit(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, mode, transaction, mutations]) if request is not None and has_flattened_params: raise ValueError( @@ -1183,10 +1187,8 @@ def sample_commit(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a datastore.CommitRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore.CommitRequest): request = datastore.CommitRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1303,8 +1305,8 @@ def sample_rollback(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project_id, transaction]) if request is not None and has_flattened_params: raise ValueError( @@ -1312,10 +1314,8 @@ def sample_rollback(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a datastore.RollbackRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore.RollbackRequest): request = datastore.RollbackRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1428,8 +1428,8 @@ def sample_allocate_ids(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, keys]) if request is not None and has_flattened_params: raise ValueError( @@ -1437,10 +1437,8 @@ def sample_allocate_ids(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a datastore.AllocateIdsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore.AllocateIdsRequest): request = datastore.AllocateIdsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1552,8 +1550,8 @@ def sample_reserve_ids(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, keys]) if request is not None and has_flattened_params: raise ValueError( @@ -1561,10 +1559,8 @@ def sample_reserve_ids(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a datastore.ReserveIdsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore.ReserveIdsRequest): request = datastore.ReserveIdsRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/google/cloud/datastore_v1/services/datastore/transports/base.py b/google/cloud/datastore_v1/services/datastore/transports/base.py index 3c31a4a7..db08f5b4 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/base.py +++ b/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -86,6 +86,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. 
@@ -98,7 +100,7 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) diff --git a/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/google/cloud/datastore_v1/services/datastore/transports/grpc.py index ebc16b21..620576e2 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ b/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -57,7 +57,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -77,14 +77,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -94,11 +97,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -124,9 +127,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. 
-            credentials = False
+            credentials = None
+            self._ignore_credentials = True
             # If a channel was explicitly provided, set it.
             self._grpc_channel = channel
             self._ssl_channel_credentials = None
@@ -165,7 +169,9 @@ def __init__(
             )
 
         if not self._grpc_channel:
-            self._grpc_channel = type(self).create_channel(
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
                 self._host,
                 # use the credentials which are saved
                 credentials=self._credentials,
diff --git a/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py b/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py
index 7b3997dd..b826d7c6 100644
--- a/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py
+++ b/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py
@@ -18,6 +18,8 @@
 
 from google.api_core import gapic_v1
 from google.api_core import grpc_helpers_async
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry_async as retries
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
 
@@ -72,7 +74,6 @@ def create_channel(
             the credentials from the environment.
         credentials_file (Optional[str]): A file with credentials that can
             be loaded with :func:`google.auth.load_credentials_from_file`.
-            This argument is ignored if ``channel`` is provided.
         scopes (Optional[Sequence[str]]): An optional list of scopes needed
             for this service. These are only used when credentials
             are not specified and are passed to :func:`google.auth.default`.
@@ -102,7 +103,7 @@ def __init__(
         credentials: Optional[ga_credentials.Credentials] = None,
         credentials_file: Optional[str] = None,
         scopes: Optional[Sequence[str]] = None,
-        channel: Optional[aio.Channel] = None,
+        channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
         api_mtls_endpoint: Optional[str] = None,
         client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
         ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
@@ -122,15 +123,18 @@ def __init__(
                 credentials identify the application to the service; if none
                 are specified, the client will attempt to ascertain the
                 credentials from the environment.
-                This argument is ignored if ``channel`` is provided.
+                This argument is ignored if a ``channel`` instance is provided.
             credentials_file (Optional[str]): A file with credentials that can
                 be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
+                This argument is ignored if a ``channel`` instance is provided.
             scopes (Optional[Sequence[str]]): An optional list of scopes needed
                 for this service. These are only used when credentials
                 are not specified and are passed to :func:`google.auth.default`.
-            channel (Optional[aio.Channel]): A ``Channel`` instance through
-                which to make calls.
+            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
             api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -140,11 +144,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -170,9 +174,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -210,7 +215,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -460,6 +467,91 @@ def reserve_ids( ) return self._stubs["reserve_ids"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.lookup: gapic_v1.method_async.wrap_method( + self.lookup, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.run_query: gapic_v1.method_async.wrap_method( + self.run_query, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.run_aggregation_query: gapic_v1.method_async.wrap_method( + self.run_aggregation_query, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.begin_transaction: gapic_v1.method_async.wrap_method( + self.begin_transaction, + default_timeout=60.0, + client_info=client_info, + ), + self.commit: gapic_v1.method_async.wrap_method( + self.commit, + default_timeout=60.0, + client_info=client_info, + ), + self.rollback: gapic_v1.method_async.wrap_method( + self.rollback, + default_timeout=60.0, + client_info=client_info, + ), + self.allocate_ids: gapic_v1.method_async.wrap_method( + self.allocate_ids, + default_timeout=60.0, + 
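                # A gloss on the AsyncRetry values used above and below (a
                # reading of the generated numbers, not new behavior): sleep
                # ceilings grow as 0.1s, 0.13s, 0.169s, ... (multiplier 1.3,
                # capped at 60s per attempt); only DeadlineExceeded and
                # ServiceUnavailable are retried; and the overall retry
                # deadline is 60s, matching default_timeout.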
client_info=client_info, + ), + self.reserve_ids: gapic_v1.method_async.wrap_method( + self.reserve_ids, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/google/cloud/datastore_v1/types/__init__.py b/google/cloud/datastore_v1/types/__init__.py index 6aa3d846..3ae809b4 100644 --- a/google/cloud/datastore_v1/types/__init__.py +++ b/google/cloud/datastore_v1/types/__init__.py @@ -28,6 +28,7 @@ LookupResponse, Mutation, MutationResult, + PropertyMask, ReadOptions, ReserveIdsRequest, ReserveIdsResponse, @@ -81,6 +82,7 @@ "LookupResponse", "Mutation", "MutationResult", + "PropertyMask", "ReadOptions", "ReserveIdsRequest", "ReserveIdsResponse", diff --git a/google/cloud/datastore_v1/types/datastore.py b/google/cloud/datastore_v1/types/datastore.py index ccea0458..11974c3d 100644 --- a/google/cloud/datastore_v1/types/datastore.py +++ b/google/cloud/datastore_v1/types/datastore.py @@ -47,6 +47,7 @@ "ReserveIdsResponse", "Mutation", "MutationResult", + "PropertyMask", "ReadOptions", "TransactionOptions", }, @@ -70,6 +71,15 @@ class LookupRequest(proto.Message): The options for this lookup request. keys (MutableSequence[google.cloud.datastore_v1.types.Key]): Required. Keys of entities to look up. + property_mask (google.cloud.datastore_v1.types.PropertyMask): + The properties to return. Defaults to returning all + properties. + + If this field is set and an entity has a property not + referenced in the mask, it will be absent from + [LookupResponse.found.entity.properties][]. + + The entity's key is always returned. """ project_id: str = proto.Field( @@ -90,6 +100,11 @@ class LookupRequest(proto.Message): number=3, message=entity.Key, ) + property_mask: "PropertyMask" = proto.Field( + proto.MESSAGE, + number=5, + message="PropertyMask", + ) class LookupResponse(proto.Message): @@ -186,6 +201,12 @@ class RunQueryRequest(proto.Message): non-aggregation query. This field is a member of `oneof`_ ``query_type``. + property_mask (google.cloud.datastore_v1.types.PropertyMask): + The properties to return. This field must not be set for a + projection query. + + See + [LookupRequest.property_mask][google.datastore.v1.LookupRequest.property_mask]. explain_options (google.cloud.datastore_v1.types.ExplainOptions): Optional. Explain options for the query. If set, additional query statistics will be @@ -223,6 +244,11 @@ class RunQueryRequest(proto.Message): oneof="query_type", message=gd_query.GqlQuery, ) + property_mask: "PropertyMask" = proto.Field( + proto.MESSAGE, + number=10, + message="PropertyMask", + ) explain_options: query_profile.ExplainOptions = proto.Field( proto.MESSAGE, number=12, @@ -770,6 +796,14 @@ class Mutation(proto.Message): mutation conflicts. This field is a member of `oneof`_ ``conflict_detection_strategy``. + property_mask (google.cloud.datastore_v1.types.PropertyMask): + The properties to write in this mutation. None of the + properties in the mask may have a reserved name, except for + ``__key__``. This field is ignored for ``delete``. + + If the entity already exists, only properties referenced in + the mask are updated, others are left untouched. Properties + referenced in the mask but not in the entity are deleted. 
""" insert: entity.Entity = proto.Field( @@ -807,6 +841,11 @@ class Mutation(proto.Message): oneof="conflict_detection_strategy", message=timestamp_pb2.Timestamp, ) + property_mask: "PropertyMask" = proto.Field( + proto.MESSAGE, + number=9, + message="PropertyMask", + ) class MutationResult(proto.Message): @@ -866,6 +905,33 @@ class MutationResult(proto.Message): ) +class PropertyMask(proto.Message): + r"""The set of arbitrarily nested property paths used to restrict + an operation to only a subset of properties in an entity. + + Attributes: + paths (MutableSequence[str]): + The paths to the properties covered by this mask. + + A path is a list of property names separated by dots + (``.``), for example ``foo.bar`` means the property ``bar`` + inside the entity property ``foo`` inside the entity + associated with this path. + + If a property name contains a dot ``.`` or a backslash + ``\``, then that name must be escaped. + + A path must not be empty, and may not reference a value + inside an [array + value][google.datastore.v1.Value.array_value]. + """ + + paths: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + class ReadOptions(proto.Message): r"""The options shared by read requests. diff --git a/scripts/fixup_datastore_v1_keywords.py b/scripts/fixup_datastore_v1_keywords.py index f0406904..661d509b 100644 --- a/scripts/fixup_datastore_v1_keywords.py +++ b/scripts/fixup_datastore_v1_keywords.py @@ -42,11 +42,11 @@ class datastoreCallTransformer(cst.CSTTransformer): 'allocate_ids': ('project_id', 'keys', 'database_id', ), 'begin_transaction': ('project_id', 'database_id', 'transaction_options', ), 'commit': ('project_id', 'database_id', 'mode', 'transaction', 'single_use_transaction', 'mutations', ), - 'lookup': ('project_id', 'keys', 'database_id', 'read_options', ), + 'lookup': ('project_id', 'keys', 'database_id', 'read_options', 'property_mask', ), 'reserve_ids': ('project_id', 'keys', 'database_id', ), 'rollback': ('project_id', 'transaction', 'database_id', ), 'run_aggregation_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'aggregation_query', 'gql_query', 'explain_options', ), - 'run_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'query', 'gql_query', 'explain_options', ), + 'run_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'query', 'gql_query', 'property_mask', 'explain_options', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index c08b309a..8e65052c 100644 --- a/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -1176,6 +1176,9 @@ def test_export_entities_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_entities), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.export_entities() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1200,6 +1203,9 @@ def test_export_entities_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.export_entities), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.export_entities(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1209,6 +1215,45 @@ def test_export_entities_non_empty_request_with_auto_populated_field(): ) +def test_export_entities_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.export_entities in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.export_entities] = mock_rpc + request = {} + client.export_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_entities(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_export_entities_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1230,6 +1275,51 @@ async def test_export_entities_empty_call_async(): assert args[0] == datastore_admin.ExportEntitiesRequest() +@pytest.mark.asyncio +async def test_export_entities_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.export_entities + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.export_entities + ] = mock_object + + request = {} + await client.export_entities(request) + + # Establish that the underlying gRPC stub method was called. 
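# A companion sketch for Mutation.property_mask (``task`` and ``client`` are
# assumed to come from the lookup sketch earlier, e.g.
# ``task = response.found[0].entity``): write only the masked properties of an
# existing entity, leaving the rest untouched.
#
#     from google.cloud.datastore_v1.types import CommitRequest, Mutation, PropertyMask
#
#     task.properties["done"].boolean_value = True
#     client.commit(
#         request=CommitRequest(
#             project_id="my-project",
#             mode=CommitRequest.Mode.NON_TRANSACTIONAL,
#             # Only ``done`` is written back; other properties are left as-is.
#             mutations=[
#                 Mutation(update=task, property_mask=PropertyMask(paths=["done"]))
#             ],
#         )
#     )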
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.export_entities(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_export_entities_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ExportEntitiesRequest @@ -1482,6 +1572,9 @@ def test_import_entities_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.import_entities), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.import_entities() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1506,6 +1599,9 @@ def test_import_entities_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.import_entities), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.import_entities(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1515,6 +1611,45 @@ def test_import_entities_non_empty_request_with_auto_populated_field(): ) +def test_import_entities_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.import_entities in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.import_entities] = mock_rpc + request = {} + client.import_entities(request) + + # Establish that the underlying gRPC stub method was called. 
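# The behavior these cached-wrapped-rpc tests pin down, in miniature
# (hypothetical names, not library code): wrap each RPC exactly once at
# construction time, then dispatch every call through the cache so that
# wrap_method is never re-invoked per call.
#
#     class MiniTransport:
#         def __init__(self, wrap_method):
#             # one wrap per method, performed at construction
#             self._wrapped_methods = {self.lookup: wrap_method(self.lookup)}
#
#         def lookup(self, request):
#             return {"found": [], "request": request}
#
#         def call(self, method, request):
#             # per-call dispatch hits the cache, not wrap_method
#             return self._wrapped_methods[method](request)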
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_entities(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_import_entities_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1536,6 +1671,51 @@ async def test_import_entities_empty_call_async(): assert args[0] == datastore_admin.ImportEntitiesRequest() +@pytest.mark.asyncio +async def test_import_entities_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.import_entities + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.import_entities + ] = mock_object + + request = {} + await client.import_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.import_entities(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_import_entities_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ImportEntitiesRequest @@ -1788,6 +1968,9 @@ def test_create_index_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_index), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_index() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1811,6 +1994,9 @@ def test_create_index_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_index), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.create_index(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1819,6 +2005,45 @@ def test_create_index_non_empty_request_with_auto_populated_field(): ) +def test_create_index_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_index in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_index] = mock_rpc + request = {} + client.create_index(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_index(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_index_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1840,6 +2065,51 @@ async def test_create_index_empty_call_async(): assert args[0] == datastore_admin.CreateIndexRequest() +@pytest.mark.asyncio +async def test_create_index_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_index + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_index + ] = mock_object + + request = {} + await client.create_index(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_index(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.CreateIndexRequest @@ -1980,6 +2250,9 @@ def test_delete_index_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_index), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_index() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2004,6 +2277,9 @@ def test_delete_index_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_index), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_index(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2013,6 +2289,45 @@ def test_delete_index_non_empty_request_with_auto_populated_field(): ) +def test_delete_index_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_index in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_index] = mock_rpc + request = {} + client.delete_index(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_index(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_index_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2034,6 +2349,51 @@ async def test_delete_index_empty_call_async(): assert args[0] == datastore_admin.DeleteIndexRequest() +@pytest.mark.asyncio +async def test_delete_index_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_index + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_index + ] = mock_object + + request = {} + await client.delete_index(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_index(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.DeleteIndexRequest @@ -2187,6 +2547,9 @@ def test_get_index_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_index), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_index() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2211,6 +2574,9 @@ def test_get_index_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_index), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_index(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2220,6 +2586,41 @@ def test_get_index_non_empty_request_with_auto_populated_field(): ) +def test_get_index_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_index in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_index] = mock_rpc + request = {} + client.get_index(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_index(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_index_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2247,6 +2648,45 @@ async def test_get_index_empty_call_async(): assert args[0] == datastore_admin.GetIndexRequest() +@pytest.mark.asyncio +async def test_get_index_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_index + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_index + ] = mock_object + + request = {} + await client.get_index(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_index(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.GetIndexRequest @@ -2401,6 +2841,9 @@ def test_list_indexes_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_indexes() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2426,6 +2869,9 @@ def test_list_indexes_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_indexes(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2436,6 +2882,41 @@ def test_list_indexes_non_empty_request_with_auto_populated_field(): ) +def test_list_indexes_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_indexes in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.list_indexes] = mock_rpc + request = {} + client.list_indexes(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_indexes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_indexes_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2459,6 +2940,47 @@ async def test_list_indexes_empty_call_async(): assert args[0] == datastore_admin.ListIndexesRequest() +@pytest.mark.asyncio +async def test_list_indexes_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_indexes + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_indexes + ] = mock_object + + request = {} + await client.list_indexes(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_indexes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_indexes_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ListIndexesRequest @@ -2596,13 +3118,13 @@ def test_list_indexes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("project_id", ""),)), ) pager = client.list_indexes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2784,6 +3306,46 @@ def test_export_entities_rest(request_type): assert response.operation.name == "operations/spam" +def test_export_entities_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.export_entities in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.export_entities] = mock_rpc + + request = {} + client.export_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_entities(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_export_entities_rest_required_fields( request_type=datastore_admin.ExportEntitiesRequest, ): @@ -3060,6 +3622,46 @@ def test_import_entities_rest(request_type): assert response.operation.name == "operations/spam" +def test_import_entities_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.import_entities in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.import_entities] = mock_rpc + + request = {} + client.import_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_entities(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_import_entities_rest_required_fields( request_type=datastore_admin.ImportEntitiesRequest, ): @@ -3411,6 +4013,46 @@ def get_message_fields(field): assert response.operation.name == "operations/spam" +def test_create_index_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_index in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_index] = mock_rpc + + request = {} + client.create_index(request) + + # Establish that the underlying gRPC stub method was called. 
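# Why the operation-returning tests above reset wrapper_fn between the two
# calls (a reading, not asserted by the patch itself): export/import/create/
# delete return long-running operations, and the first call can also build
# the operations-polling client, which wraps its own methods once; resetting
# the mock isolates the "no new wrapper on the second call" assertion.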
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_index(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.parametrize("null_interceptor", [True, False]) def test_create_index_rest_interceptors(null_interceptor): transport = transports.DatastoreAdminRestTransport( @@ -3535,6 +4177,46 @@ def test_delete_index_rest(request_type): assert response.operation.name == "operations/spam" +def test_delete_index_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_index in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_index] = mock_rpc + + request = {} + client.delete_index(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_index(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.parametrize("null_interceptor", [True, False]) def test_delete_index_rest_interceptors(null_interceptor): transport = transports.DatastoreAdminRestTransport( @@ -3672,6 +4354,42 @@ def test_get_index_rest(request_type): assert response.state == index.Index.State.CREATING +def test_get_index_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_index in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_index] = mock_rpc + + request = {} + client.get_index(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_index(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_index_rest_interceptors(null_interceptor): transport = transports.DatastoreAdminRestTransport( @@ -3797,6 +4515,42 @@ def test_list_indexes_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_indexes_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_indexes in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_indexes] = mock_rpc + + request = {} + client.list_indexes(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_indexes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_indexes_rest_interceptors(null_interceptor): transport = transports.DatastoreAdminRestTransport( diff --git a/tests/unit/gapic/datastore_v1/test_datastore.py b/tests/unit/gapic/datastore_v1/test_datastore.py index 203d9c3a..73f3d837 100644 --- a/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/tests/unit/gapic/datastore_v1/test_datastore.py @@ -1131,6 +1131,9 @@ def test_lookup_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.lookup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.lookup() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1155,6 +1158,9 @@ def test_lookup_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.lookup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.lookup(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1164,6 +1170,41 @@ def test_lookup_non_empty_request_with_auto_populated_field(): ) +def test_lookup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.lookup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.lookup] = mock_rpc + request = {} + client.lookup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.lookup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_lookup_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1187,6 +1228,45 @@ async def test_lookup_empty_call_async(): assert args[0] == datastore.LookupRequest() +@pytest.mark.asyncio +async def test_lookup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.lookup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.lookup + ] = mock_object + + request = {} + await client.lookup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.lookup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_lookup_async( transport: str = "grpc_asyncio", request_type=datastore.LookupRequest @@ -1447,6 +1527,9 @@ def test_run_query_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.run_query), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.run_query() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1471,6 +1554,9 @@ def test_run_query_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.run_query), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.run_query(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1480,6 +1566,41 @@ def test_run_query_non_empty_request_with_auto_populated_field(): ) +def test_run_query_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.run_query in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.run_query] = mock_rpc + request = {} + client.run_query(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.run_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_run_query_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1503,6 +1624,45 @@ async def test_run_query_empty_call_async(): assert args[0] == datastore.RunQueryRequest() +@pytest.mark.asyncio +async def test_run_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.run_query + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.run_query + ] = mock_object + + request = {} + await client.run_query(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.run_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_run_query_async( transport: str = "grpc_asyncio", request_type=datastore.RunQueryRequest @@ -1633,6 +1793,9 @@ def test_run_aggregation_query_empty_call(): with mock.patch.object( type(client.transport.run_aggregation_query), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.run_aggregation_query() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1659,6 +1822,9 @@ def test_run_aggregation_query_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.run_aggregation_query), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.run_aggregation_query(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1668,6 +1834,46 @@ def test_run_aggregation_query_non_empty_request_with_auto_populated_field(): ) +def test_run_aggregation_query_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.run_aggregation_query + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.run_aggregation_query + ] = mock_rpc + request = {} + client.run_aggregation_query(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.run_aggregation_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_run_aggregation_query_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1693,6 +1899,47 @@ async def test_run_aggregation_query_empty_call_async(): assert args[0] == datastore.RunAggregationQueryRequest() +@pytest.mark.asyncio +async def test_run_aggregation_query_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.run_aggregation_query + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.run_aggregation_query + ] = mock_object + + request = {} + await client.run_aggregation_query(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.run_aggregation_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_run_aggregation_query_async( transport: str = "grpc_asyncio", request_type=datastore.RunAggregationQueryRequest @@ -1829,6 +2076,9 @@ def test_begin_transaction_empty_call(): with mock.patch.object( type(client.transport.begin_transaction), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.begin_transaction() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1855,6 +2105,9 @@ def test_begin_transaction_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.begin_transaction), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.begin_transaction(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1864,6 +2117,43 @@ def test_begin_transaction_non_empty_request_with_auto_populated_field(): ) +def test_begin_transaction_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.begin_transaction in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.begin_transaction + ] = mock_rpc + request = {} + client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.begin_transaction(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_begin_transaction_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1889,6 +2179,47 @@ async def test_begin_transaction_empty_call_async(): assert args[0] == datastore.BeginTransactionRequest() +@pytest.mark.asyncio +async def test_begin_transaction_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.begin_transaction + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.begin_transaction + ] = mock_object + + request = {} + await client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.begin_transaction(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_begin_transaction_async( transport: str = "grpc_asyncio", request_type=datastore.BeginTransactionRequest @@ -2107,6 +2438,9 @@ def test_commit_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.commit), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.commit() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2131,6 +2465,9 @@ def test_commit_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.commit), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.commit(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2140,6 +2477,41 @@ def test_commit_non_empty_request_with_auto_populated_field(): ) +def test_commit_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.commit in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.commit] = mock_rpc + request = {} + client.commit(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.commit(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_commit_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2163,6 +2535,45 @@ async def test_commit_empty_call_async(): assert args[0] == datastore.CommitRequest() +@pytest.mark.asyncio +async def test_commit_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.commit + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.commit + ] = mock_object + + request = {} + await client.commit(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.commit(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_commit_async( transport: str = "grpc_asyncio", request_type=datastore.CommitRequest @@ -2450,6 +2861,9 @@ def test_rollback_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.rollback), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.rollback() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2474,6 +2888,9 @@ def test_rollback_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.rollback), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.rollback(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2483,6 +2900,41 @@ def test_rollback_non_empty_request_with_auto_populated_field(): ) +def test_rollback_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.rollback in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.rollback] = mock_rpc + request = {} + client.rollback(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.rollback(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_rollback_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2504,6 +2956,45 @@ async def test_rollback_empty_call_async(): assert args[0] == datastore.RollbackRequest() +@pytest.mark.asyncio +async def test_rollback_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.rollback + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.rollback + ] = mock_object + + request = {} + await client.rollback(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.rollback(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_rollback_async( transport: str = "grpc_asyncio", request_type=datastore.RollbackRequest @@ -2716,6 +3207,9 @@ def test_allocate_ids_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.allocate_ids() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2740,6 +3234,9 @@ def test_allocate_ids_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.allocate_ids(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2749,6 +3246,41 @@ def test_allocate_ids_non_empty_request_with_auto_populated_field(): ) +def test_allocate_ids_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.allocate_ids in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.allocate_ids] = mock_rpc + request = {} + client.allocate_ids(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.allocate_ids(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_allocate_ids_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2770,6 +3302,47 @@ async def test_allocate_ids_empty_call_async(): assert args[0] == datastore.AllocateIdsRequest() +@pytest.mark.asyncio +async def test_allocate_ids_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.allocate_ids + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.allocate_ids + ] = mock_object + + request = {} + await client.allocate_ids(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.allocate_ids(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_allocate_ids_async( transport: str = "grpc_asyncio", request_type=datastore.AllocateIdsRequest @@ -3002,6 +3575,9 @@ def test_reserve_ids_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.reserve_ids() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3026,6 +3602,9 @@ def test_reserve_ids_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.reserve_ids(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3035,6 +3614,41 @@ def test_reserve_ids_non_empty_request_with_auto_populated_field(): ) +def test_reserve_ids_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.reserve_ids in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.reserve_ids] = mock_rpc + request = {} + client.reserve_ids(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.reserve_ids(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_reserve_ids_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3056,6 +3670,47 @@ async def test_reserve_ids_empty_call_async(): assert args[0] == datastore.ReserveIdsRequest() +@pytest.mark.asyncio +async def test_reserve_ids_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.reserve_ids + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.reserve_ids + ] = mock_object + + request = {} + await client.reserve_ids(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.reserve_ids(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_reserve_ids_async( transport: str = "grpc_asyncio", request_type=datastore.ReserveIdsRequest @@ -3285,6 +3940,42 @@ def test_lookup_rest(request_type): assert response.transaction == b"transaction_blob" +def test_lookup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.lookup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.lookup] = mock_rpc + + request = {} + client.lookup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.lookup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_lookup_rest_required_fields(request_type=datastore.LookupRequest): transport_class = transports.DatastoreRestTransport @@ -3569,6 +4260,42 @@ def test_run_query_rest(request_type): assert response.transaction == b"transaction_blob" +def test_run_query_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.run_query in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.run_query] = mock_rpc + + request = {} + client.run_query(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.run_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_run_query_rest_required_fields(request_type=datastore.RunQueryRequest): transport_class = transports.DatastoreRestTransport @@ -3774,6 +4501,47 @@ def test_run_aggregation_query_rest(request_type): assert response.transaction == b"transaction_blob" +def test_run_aggregation_query_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.run_aggregation_query + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.run_aggregation_query + ] = mock_rpc + + request = {} + client.run_aggregation_query(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.run_aggregation_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_run_aggregation_query_rest_required_fields( request_type=datastore.RunAggregationQueryRequest, ): @@ -3983,6 +4751,44 @@ def test_begin_transaction_rest(request_type): assert response.transaction == b"transaction_blob" +def test_begin_transaction_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.begin_transaction in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.begin_transaction + ] = mock_rpc + + request = {} + client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.begin_transaction(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_begin_transaction_rest_required_fields( request_type=datastore.BeginTransactionRequest, ): @@ -4248,6 +5054,42 @@ def test_commit_rest(request_type): assert response.index_updates == 1389 +def test_commit_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.commit in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.commit] = mock_rpc + + request = {} + client.commit(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.commit(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_commit_rest_required_fields(request_type=datastore.CommitRequest): transport_class = transports.DatastoreRestTransport @@ -4530,6 +5372,42 @@ def test_rollback_rest(request_type): assert isinstance(response, datastore.RollbackResponse) +def test_rollback_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.rollback in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.rollback] = mock_rpc + + request = {} + client.rollback(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.rollback(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_rollback_rest_required_fields(request_type=datastore.RollbackRequest): transport_class = transports.DatastoreRestTransport @@ -4801,6 +5679,42 @@ def test_allocate_ids_rest(request_type): assert isinstance(response, datastore.AllocateIdsResponse) +def test_allocate_ids_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.allocate_ids in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.allocate_ids] = mock_rpc + + request = {} + client.allocate_ids(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.allocate_ids(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_allocate_ids_rest_required_fields(request_type=datastore.AllocateIdsRequest): transport_class = transports.DatastoreRestTransport @@ -5076,6 +5990,42 @@ def test_reserve_ids_rest(request_type): assert isinstance(response, datastore.ReserveIdsResponse) +def test_reserve_ids_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.reserve_ids in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.reserve_ids] = mock_rpc + + request = {} + client.reserve_ids(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.reserve_ids(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_reserve_ids_rest_required_fields(request_type=datastore.ReserveIdsRequest): transport_class = transports.DatastoreRestTransport From e65c532b3f2c4c303790fab676a8d5684eef48e9 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 29 Jul 2024 08:14:15 -0600 Subject: [PATCH 17/24] chore: update mypy target (#554) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update mypy package target * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- noxfile.py | 2 +- owlbot.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/noxfile.py b/noxfile.py index e4e112d5..a3084c38 100644 --- a/noxfile.py +++ b/noxfile.py @@ -143,7 +143,7 @@ def mypy(session): "types-protobuf!=4.24.0.20240106", "types-requests", ) - session.run("mypy", "-p", "google") + session.run("mypy", "-p", "google.cloud.datastore") @nox.session(python=DEFAULT_PYTHON_VERSION) diff --git a/owlbot.py b/owlbot.py index 82565aa3..3b8149e6 100644 --- a/owlbot.py +++ b/owlbot.py @@ -285,7 +285,7 @@ def mypy(session): session.install( "mypy", "types-setuptools", "types-mock", "types-protobuf!=4.24.0.20240106", "types-requests" ) - session.run("mypy", "-p", "google") + session.run("mypy", "-p", "google.cloud.datastore") @nox.session(python=DEFAULT_PYTHON_VERSION) From e76061473047ca1d630ccd4197b043264607f586 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 29 Jul 2024 13:47:43 -0400 Subject: [PATCH 18/24] chore: update templated files (#550) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update templated files * remove obsolete replacements in owlbot.py * update replacements in owlbot.py * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> --- .flake8 | 2 +- .github/.OwlBot.lock.yaml | 3 +- .github/auto-label.yaml | 2 +- .kokoro/build.sh | 2 +- .kokoro/docker/docs/Dockerfile | 23 +- .kokoro/docker/docs/requirements.txt | 40 ++- .kokoro/populate-secrets.sh | 2 +- .kokoro/publish-docs.sh | 2 +- .kokoro/release.sh | 2 +- .kokoro/requirements.txt | 509 ++++++++++++++------------- .kokoro/test-samples-against-head.sh | 2 +- .kokoro/test-samples-impl.sh | 2 +- .kokoro/test-samples.sh | 2 +- .kokoro/trampoline.sh | 2 +- .kokoro/trampoline_v2.sh | 2 +- .pre-commit-config.yaml | 2 +- .trampolinerc | 2 +- MANIFEST.in | 2 +- docs/conf.py | 2 +- noxfile.py | 61 +++- owlbot.py | 31 +- scripts/decrypt-secrets.sh | 2 +- scripts/readme-gen/readme_gen.py | 2 +- 23 files changed, 377 insertions(+), 324 deletions(-) diff --git a/.flake8 b/.flake8 index 87f6e408..32986c79 100644 --- a/.flake8 +++ b/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
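The `*_use_cached_wrapped_rpc` tests added above all pin down the same contract, across the grpc, grpc_asyncio, and rest transports: `_prep_wrapped_messages` wraps every RPC exactly once when the client is constructed, and later calls look the wrapped callable up in `_transport._wrapped_methods` instead of invoking `wrap_method` again. A minimal sketch of that contract follows; `FakeTransport` and `echo_stub` are hypothetical stand-ins for the generated transport classes, not names from this repository, and `wrap_method` here only mimics the shape of `google.api_core.gapic_v1.method.wrap_method`:

    # Stand-in for google.api_core.gapic_v1.method.wrap_method, which in the
    # real library layers retry/timeout/metadata defaults around a bare stub.
    def wrap_method(func):
        def wrapped(request):
            return func(request)
        return wrapped

    class FakeTransport:
        def __init__(self, stubs):
            # _prep_wrapped_messages equivalent: wrap once at construction
            # and cache the result keyed by the bare stub callable.
            self._wrapped_methods = {stub: wrap_method(stub) for stub in stubs}

        def call(self, stub, request):
            # Every call goes through the cache; wrap_method is never
            # re-entered. This is what the tests assert by checking that
            # wrapper_fn.call_count stays 0 while mock_rpc.call_count grows.
            return self._wrapped_methods[stub](request)

    def echo_stub(request):
        return request

    transport = FakeTransport([echo_stub])
    assert transport.call(echo_stub, {"ping": 1}) == {"ping": 1}

Replacing the cached entry with a `mock.Mock()` (or `mock.AsyncMock()` for the async client), as the tests above do, is what lets them count calls to the cached wrapper directly.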
diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 81f87c56..001b1b1c 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5a4c19d17e597b92d786e569be101e636c9c2817731f80a5adec56b2aa8fe070 -# created: 2024-04-12T11:35:58.922854369Z + digest: sha256:52210e0e0559f5ea8c52be148b33504022e1faef4e95fbe4b32d68022af2fa7e diff --git a/.github/auto-label.yaml b/.github/auto-label.yaml index 8b37ee89..21786a4e 100644 --- a/.github/auto-label.yaml +++ b/.github/auto-label.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/build.sh b/.kokoro/build.sh index f9800c92..e0cd71b0 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index bdaf39fe..5205308b 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ubuntu:22.04 +from ubuntu:24.04 ENV DEBIAN_FRONTEND noninteractive @@ -40,7 +40,6 @@ RUN apt-get update \ libssl-dev \ libsqlite3-dev \ portaudio19-dev \ - python3-distutils \ redis-server \ software-properties-common \ ssh \ @@ -60,18 +59,22 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb -###################### Install python 3.9.13 -# Download python 3.9.13 -RUN wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz +###################### Install python 3.10.14 for docs/docfx session + +# Download python 3.10.14 +RUN wget https://www.python.org/ftp/python/3.10.14/Python-3.10.14.tgz # Extract files -RUN tar -xvf Python-3.9.13.tgz +RUN tar -xvf Python-3.10.14.tgz -# Install python 3.9.13 -RUN ./Python-3.9.13/configure --enable-optimizations +# Install python 3.10.14 +RUN ./Python-3.10.14/configure --enable-optimizations RUN make altinstall +RUN python3.10 -m venv /venv +ENV PATH /venv/bin:$PATH + ###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ && python3 /tmp/get-pip.py \ @@ -84,4 +87,4 @@ RUN python3 -m pip COPY requirements.txt /requirements.txt RUN python3 -m pip install --require-hashes -r requirements.txt -CMD ["python3.8"] +CMD ["python3.10"] diff --git a/.kokoro/docker/docs/requirements.txt b/.kokoro/docker/docs/requirements.txt index 0e5d70f2..7129c771 100644 --- a/.kokoro/docker/docs/requirements.txt +++ b/.kokoro/docker/docs/requirements.txt @@ -4,9 +4,9 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.2.3 \ - --hash=sha256:bf7900329262e481be5a15f56f19736b376df6f82ed27576fa893652c5de6c23 \ - --hash=sha256:c12355e0494c76a2a7b73e3a59b09024ca0ba1e279fb9ed6c1b82d5b74b6a70c +argcomplete==3.4.0 \ + 
--hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ + --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f # via nox colorlog==6.8.2 \ --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ @@ -16,23 +16,27 @@ distlib==0.3.8 \ --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 # via virtualenv -filelock==3.13.1 \ - --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ - --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c +filelock==3.15.4 \ + --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ + --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 # via virtualenv -nox==2024.3.2 \ - --hash=sha256:e53514173ac0b98dd47585096a55572fe504fecede58ced708979184d05440be \ - --hash=sha256:f521ae08a15adbf5e11f16cb34e8d0e6ea521e0b92868f684e91677deb974553 +nox==2024.4.15 \ + --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ + --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f # via -r requirements.in -packaging==24.0 \ - --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \ - --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9 +packaging==24.1 \ + --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ + --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 # via nox -platformdirs==4.2.0 \ - --hash=sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068 \ - --hash=sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768 +platformdirs==4.2.2 \ + --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ + --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 # via virtualenv -virtualenv==20.25.1 \ - --hash=sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a \ - --hash=sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197 +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via nox +virtualenv==20.26.3 \ + --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ + --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 # via nox diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh index 6f397214..c435402f 100755 --- a/.kokoro/populate-secrets.sh +++ b/.kokoro/populate-secrets.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC. +# Copyright 2024 Google LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh index 9eafe0be..38f083f0 100755 --- a/.kokoro/publish-docs.sh +++ b/.kokoro/publish-docs.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
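Both requirements files this patch regenerates (.kokoro/docker/docs/requirements.txt above and .kokoro/requirements.txt below) are hash-pinned: their headers show they come from `pip-compile --allow-unsafe --generate-hashes`, and the docs Dockerfile installs them with `pip install --require-hashes`, so every downloaded artifact must match one of the pinned sha256 digests. A minimal sketch of the check that mode performs, with a placeholder digest rather than any real release hash:

    import hashlib

    def digest_matches(path: str, pinned: set[str]) -> bool:
        # The same check pip applies under --require-hashes: hash the
        # downloaded file and accept it only if the digest was pinned.
        with open(path, "rb") as f:
            actual = hashlib.sha256(f.read()).hexdigest()
        return actual in pinned

    # Hypothetical usage; "deadbeef" * 8 is a placeholder, not a real digest.
    # pinned = {"deadbeef" * 8}
    # ok = digest_matches("somepkg-1.0-py3-none-any.whl", pinned)

This is why each dependency bump in the diffs below replaces the whole block of `--hash=sha256:...` lines for a package rather than just its version number.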
diff --git a/.kokoro/release.sh b/.kokoro/release.sh index c236e3cf..b460d5a0 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 51f92b8e..9622baf0 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -4,21 +4,25 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.1.4 \ - --hash=sha256:72558ba729e4c468572609817226fb0a6e7e9a0a7d477b882be168c0b4a62b94 \ - --hash=sha256:fbe56f8cda08aa9a04b307d8482ea703e96a6a801611acb4be9bf3942017989f +argcomplete==3.4.0 \ + --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ + --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f # via nox -attrs==23.1.0 \ - --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ - --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 +attrs==23.2.0 \ + --hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \ + --hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1 # via gcp-releasetool -cachetools==5.3.2 \ - --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ - --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 +backports-tarfile==1.2.0 \ + --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \ + --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991 + # via jaraco-context +cachetools==5.3.3 \ + --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ + --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 # via google-auth -certifi==2023.7.22 \ - --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ - --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 +certifi==2024.7.4 \ + --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ + --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 # via requests cffi==1.16.0 \ --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ @@ -87,90 +91,90 @@ click==8.0.4 \ # -r requirements.in # gcp-docuploader # gcp-releasetool -colorlog==6.7.0 \ - --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \ - --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5 +colorlog==6.8.2 \ + --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ + --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 # via # gcp-docuploader # nox -cryptography==42.0.5 \ - --hash=sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee \ - --hash=sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576 \ - --hash=sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d \ - --hash=sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30 \ - --hash=sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413 \ - --hash=sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb \ - --hash=sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da \ - 
--hash=sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4 \ - --hash=sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd \ - --hash=sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc \ - --hash=sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8 \ - --hash=sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1 \ - --hash=sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc \ - --hash=sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e \ - --hash=sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8 \ - --hash=sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940 \ - --hash=sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400 \ - --hash=sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7 \ - --hash=sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16 \ - --hash=sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278 \ - --hash=sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74 \ - --hash=sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec \ - --hash=sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1 \ - --hash=sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2 \ - --hash=sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c \ - --hash=sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922 \ - --hash=sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a \ - --hash=sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6 \ - --hash=sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1 \ - --hash=sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e \ - --hash=sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac \ - --hash=sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7 +cryptography==42.0.8 \ + --hash=sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad \ + --hash=sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583 \ + --hash=sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b \ + --hash=sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c \ + --hash=sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1 \ + --hash=sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648 \ + --hash=sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949 \ + --hash=sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba \ + --hash=sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c \ + --hash=sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9 \ + --hash=sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d \ + --hash=sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c \ + --hash=sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e \ + --hash=sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2 \ + --hash=sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d \ + --hash=sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7 \ + --hash=sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70 \ + 
--hash=sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2 \ + --hash=sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7 \ + --hash=sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14 \ + --hash=sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe \ + --hash=sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e \ + --hash=sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71 \ + --hash=sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961 \ + --hash=sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7 \ + --hash=sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c \ + --hash=sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28 \ + --hash=sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842 \ + --hash=sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902 \ + --hash=sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801 \ + --hash=sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a \ + --hash=sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e # via # -r requirements.in # gcp-releasetool # secretstorage -distlib==0.3.7 \ - --hash=sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057 \ - --hash=sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8 +distlib==0.3.8 \ + --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ + --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 # via virtualenv -docutils==0.20.1 \ - --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \ - --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b +docutils==0.21.2 \ + --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \ + --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2 # via readme-renderer -filelock==3.13.1 \ - --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ - --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c +filelock==3.15.4 \ + --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ + --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 # via virtualenv gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==2.0.0 \ - --hash=sha256:3d73480b50ba243f22d7c7ec08b115a30e1c7817c4899781840c26f9c55b8277 \ - --hash=sha256:7aa9fd935ec61e581eb8458ad00823786d91756c25e492f372b2b30962f3c28f +gcp-releasetool==2.0.1 \ + --hash=sha256:34314a910c08e8911d9c965bd44f8f2185c4f556e737d719c33a41f6a610de96 \ + --hash=sha256:b0d5863c6a070702b10883d37c4bdfd74bf930fe417f36c0c965d3b7c779ae62 # via -r requirements.in -google-api-core==2.12.0 \ - --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ - --hash=sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160 +google-api-core==2.19.1 \ + --hash=sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125 \ + --hash=sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd # via # google-cloud-core # google-cloud-storage -google-auth==2.23.4 \ - 
--hash=sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3 \ - --hash=sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2 +google-auth==2.31.0 \ + --hash=sha256:042c4702efa9f7d3c48d3a69341c209381b125faa6dbf3ebe56bc7e40ae05c23 \ + --hash=sha256:87805c36970047247c8afe614d4e3af8eceafc1ebba0c679fe75ddd1d575e871 # via # gcp-releasetool # google-api-core # google-cloud-core # google-cloud-storage -google-cloud-core==2.3.3 \ - --hash=sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb \ - --hash=sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863 +google-cloud-core==2.4.1 \ + --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ + --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 # via google-cloud-storage -google-cloud-storage==2.13.0 \ - --hash=sha256:ab0bf2e1780a1b74cf17fccb13788070b729f50c252f0c94ada2aae0ca95437d \ - --hash=sha256:f62dc4c7b6cd4360d072e3deb28035fbdad491ac3d9b0b1815a12daea10f37c7 +google-cloud-storage==2.17.0 \ + --hash=sha256:49378abff54ef656b52dca5ef0f2eba9aa83dc2b2c72c78714b03a1a95fe9388 \ + --hash=sha256:5b393bc766b7a3bc6f5407b9e665b2450d36282614b7945e570b3480a456d1e1 # via gcp-docuploader google-crc32c==1.5.0 \ --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ @@ -244,28 +248,36 @@ google-crc32c==1.5.0 \ # via # google-cloud-storage # google-resumable-media -google-resumable-media==2.6.0 \ - --hash=sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7 \ - --hash=sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b +google-resumable-media==2.7.1 \ + --hash=sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c \ + --hash=sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33 # via google-cloud-storage -googleapis-common-protos==1.61.0 \ - --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ - --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b +googleapis-common-protos==1.63.2 \ + --hash=sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945 \ + --hash=sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87 # via google-api-core idna==3.7 \ --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 # via requests -importlib-metadata==6.8.0 \ - --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ - --hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743 +importlib-metadata==8.0.0 \ + --hash=sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f \ + --hash=sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812 # via # -r requirements.in # keyring # twine -jaraco-classes==3.3.0 \ - --hash=sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb \ - --hash=sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621 +jaraco-classes==3.4.0 \ + --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \ + --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 + # via keyring +jaraco-context==5.3.0 \ + --hash=sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266 \ + --hash=sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2 + # via keyring +jaraco-functools==4.0.1 \ + 
--hash=sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664 \ + --hash=sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8 # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -273,13 +285,13 @@ jeepney==0.8.0 \ # via # keyring # secretstorage -jinja2==3.1.3 \ - --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ - --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 +jinja2==3.1.4 \ + --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ + --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d # via gcp-releasetool -keyring==24.2.0 \ - --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ - --hash=sha256:ca0746a19ec421219f4d713f848fa297a661a8a8c1504867e55bfb5e09091509 +keyring==25.2.1 \ + --hash=sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50 \ + --hash=sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b # via # gcp-releasetool # twine @@ -287,146 +299,153 @@ markdown-it-py==3.0.0 \ --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb # via rich -markupsafe==2.1.3 \ - --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ - --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ - --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ - --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ - --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \ - --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ - --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ - --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \ - --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \ - --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ - --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ - --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ - --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ - --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ - --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ - --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ - --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \ - --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ - --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ - --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ - --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ - --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ - --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ - --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ - --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ - --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \ - 
--hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ - --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ - --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ - --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ - --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ - --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ - --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ - --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \ - --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ - --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ - --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \ - --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ - --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \ - --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ - --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ - --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ - --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ - --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ - --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ - --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ - --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ - --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ - --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ - --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ - --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ - --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ - --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ - --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ - --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ - --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ - --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ - --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \ - --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \ - --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11 +markupsafe==2.1.5 \ + --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ + --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ + --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ + --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ + --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ + --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ + --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ + --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ + 
--hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ + --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ + --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ + --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ + --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ + --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ + --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ + --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ + --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ + --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ + --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ + --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ + --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ + --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ + --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ + --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ + --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ + --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ + --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ + --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ + --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ + --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ + --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ + --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ + --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ + --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ + --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ + --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ + --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ + --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ + --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ + --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ + --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ + --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ + --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ + --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ + --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ + --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ + --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ + --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ + --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ + --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ + 
--hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ + --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ + --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ + --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ + --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ + --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ + --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ + --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ + --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ + --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 # via jinja2 mdurl==0.1.2 \ --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba # via markdown-it-py -more-itertools==10.1.0 \ - --hash=sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a \ - --hash=sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6 - # via jaraco-classes -nh3==0.2.14 \ - --hash=sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873 \ - --hash=sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad \ - --hash=sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5 \ - --hash=sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525 \ - --hash=sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2 \ - --hash=sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e \ - --hash=sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d \ - --hash=sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450 \ - --hash=sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e \ - --hash=sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6 \ - --hash=sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a \ - --hash=sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4 \ - --hash=sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4 \ - --hash=sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6 \ - --hash=sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e \ - --hash=sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75 +more-itertools==10.3.0 \ + --hash=sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463 \ + --hash=sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320 + # via + # jaraco-classes + # jaraco-functools +nh3==0.2.18 \ + --hash=sha256:0411beb0589eacb6734f28d5497ca2ed379eafab8ad8c84b31bb5c34072b7164 \ + --hash=sha256:14c5a72e9fe82aea5fe3072116ad4661af5cf8e8ff8fc5ad3450f123e4925e86 \ + --hash=sha256:19aaba96e0f795bd0a6c56291495ff59364f4300d4a39b29a0abc9cb3774a84b \ + --hash=sha256:34c03fa78e328c691f982b7c03d4423bdfd7da69cd707fe572f544cf74ac23ad \ + --hash=sha256:36c95d4b70530b320b365659bb5034341316e6a9b30f0b25fa9c9eff4c27a204 \ + --hash=sha256:3a157ab149e591bb638a55c8c6bcb8cdb559c8b12c13a8affaba6cedfe51713a \ + --hash=sha256:42c64511469005058cd17cc1537578eac40ae9f7200bedcfd1fc1a05f4f8c200 \ + --hash=sha256:5f36b271dae35c465ef5e9090e1fdaba4a60a56f0bb0ba03e0932a66f28b9189 \ + 
--hash=sha256:6955369e4d9f48f41e3f238a9e60f9410645db7e07435e62c6a9ea6135a4907f \ + --hash=sha256:7b7c2a3c9eb1a827d42539aa64091640bd275b81e097cd1d8d82ef91ffa2e811 \ + --hash=sha256:8ce0f819d2f1933953fca255db2471ad58184a60508f03e6285e5114b6254844 \ + --hash=sha256:94a166927e53972a9698af9542ace4e38b9de50c34352b962f4d9a7d4c927af4 \ + --hash=sha256:a7f1b5b2c15866f2db413a3649a8fe4fd7b428ae58be2c0f6bca5eefd53ca2be \ + --hash=sha256:c8b3a1cebcba9b3669ed1a84cc65bf005728d2f0bc1ed2a6594a992e817f3a50 \ + --hash=sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307 \ + --hash=sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe # via readme-renderer -nox==2023.4.22 \ - --hash=sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891 \ - --hash=sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f +nox==2024.4.15 \ + --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ + --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f # via -r requirements.in -packaging==23.2 \ - --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ - --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 +packaging==24.1 \ + --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ + --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 # via # gcp-releasetool # nox -pkginfo==1.9.6 \ - --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \ - --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046 +pkginfo==1.10.0 \ + --hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \ + --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097 # via twine -platformdirs==3.11.0 \ - --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ - --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e +platformdirs==4.2.2 \ + --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ + --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 # via virtualenv -protobuf==4.25.3 \ - --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ - --hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ - --hash=sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c \ - --hash=sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d \ - --hash=sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4 \ - --hash=sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa \ - --hash=sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c \ - --hash=sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019 \ - --hash=sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9 \ - --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ - --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 +proto-plus==1.24.0 \ + --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \ + --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12 + # via google-api-core +protobuf==5.27.2 \ + --hash=sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505 \ + 
--hash=sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b \ + --hash=sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38 \ + --hash=sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863 \ + --hash=sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470 \ + --hash=sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6 \ + --hash=sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce \ + --hash=sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca \ + --hash=sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5 \ + --hash=sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e \ + --hash=sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714 # via # gcp-docuploader # gcp-releasetool # google-api-core # googleapis-common-protos -pyasn1==0.5.0 \ - --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ - --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde + # proto-plus +pyasn1==0.6.0 \ + --hash=sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c \ + --hash=sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473 # via # pyasn1-modules # rsa -pyasn1-modules==0.3.0 \ - --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ - --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d +pyasn1-modules==0.4.0 \ + --hash=sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6 \ + --hash=sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b # via google-auth -pycparser==2.21 \ - --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ - --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 +pycparser==2.22 \ + --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ + --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc # via cffi -pygments==2.16.1 \ - --hash=sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692 \ - --hash=sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29 +pygments==2.18.0 \ + --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \ + --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a # via # readme-renderer # rich @@ -434,20 +453,20 @@ pyjwt==2.8.0 \ --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 # via gcp-releasetool -pyperclip==1.8.2 \ - --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 +pyperclip==1.9.0 \ + --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310 # via gcp-releasetool -python-dateutil==2.8.2 \ - --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ - --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 +python-dateutil==2.9.0.post0 \ + --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ + --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 # via gcp-releasetool -readme-renderer==42.0 \ - --hash=sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d \ - --hash=sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1 +readme-renderer==44.0 
\ + --hash=sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151 \ + --hash=sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1 # via twine -requests==2.31.0 \ - --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ - --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 +requests==2.32.3 \ + --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ + --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 # via # gcp-releasetool # google-api-core @@ -462,9 +481,9 @@ rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==13.6.0 \ - --hash=sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245 \ - --hash=sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef +rich==13.7.1 \ + --hash=sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 \ + --hash=sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432 # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -480,35 +499,39 @@ six==1.16.0 \ # via # gcp-docuploader # python-dateutil -twine==4.0.2 \ - --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \ - --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8 +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via nox +twine==5.1.1 \ + --hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \ + --hash=sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db # via -r requirements.in -typing-extensions==4.8.0 \ - --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \ - --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef +typing-extensions==4.12.2 \ + --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ + --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 # via -r requirements.in -urllib3==2.0.7 \ - --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \ - --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e +urllib3==2.2.2 \ + --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ + --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 # via # requests # twine -virtualenv==20.24.6 \ - --hash=sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af \ - --hash=sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381 +virtualenv==20.26.3 \ + --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ + --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 # via nox -wheel==0.41.3 \ - --hash=sha256:488609bc63a29322326e05560731bf7bfea8e48ad646e1f5e40d366607de0942 \ - --hash=sha256:4d4987ce51a49370ea65c0bfd2234e8ce80a12780820d9dc462597a6e60d0841 +wheel==0.43.0 \ + --hash=sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85 \ + --hash=sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81 # via -r requirements.in -zipp==3.17.0 \ - 
--hash=sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 \ - --hash=sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0 +zipp==3.19.2 \ + --hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \ + --hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==69.2.0 \ - --hash=sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e \ - --hash=sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c +setuptools==70.2.0 \ + --hash=sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05 \ + --hash=sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1 # via -r requirements.in diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh index 63ac41df..e9d8bd79 100755 --- a/.kokoro/test-samples-against-head.sh +++ b/.kokoro/test-samples-against-head.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh index 5a0f5fab..55910c8b 100755 --- a/.kokoro/test-samples-impl.sh +++ b/.kokoro/test-samples-impl.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh index 50b35a48..7933d820 100755 --- a/.kokoro/test-samples.sh +++ b/.kokoro/test-samples.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh index d85b1f26..48f79699 100755 --- a/.kokoro/trampoline.sh +++ b/.kokoro/trampoline.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh index 59a7cf3a..35fa5292 100755 --- a/.kokoro/trampoline_v2.sh +++ b/.kokoro/trampoline_v2.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6a8e1695..1d74695f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.trampolinerc b/.trampolinerc index a7dfeb42..00801523 100644 --- a/.trampolinerc +++ b/.trampolinerc @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/MANIFEST.in b/MANIFEST.in index e0a66705..d6814cd6 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/docs/conf.py b/docs/conf.py index 3e814f57..8122be71 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/noxfile.py b/noxfile.py index a3084c38..4a08c70f 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -181,14 +181,28 @@ def install_unittest_dependencies(session, *constraints): session.install("-e", ".", *constraints) -def default(session): +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) install_unittest_dependencies(session, "-c", constraints_path) + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + # Run py.test against the unit tests. session.run( "py.test", @@ -202,15 +216,12 @@ def default(session): "--cov-fail-under=0", os.path.join("tests", "unit"), *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) -@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) -def unit(session): - """Run the unit test suite.""" - default(session) - - def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. 
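For context on the parametrized unit session above: protobuf selects its runtime backend from the PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION environment variable at import time, which is why the nox sessions pass it through to py.test in the env mapping rather than setting it after the interpreter has started. A minimal sketch of checking which backend is active — api_implementation is an internal protobuf helper, and the "upb" value here is illustrative, not something this patch mandates:

    import os

    # Must be set before google.protobuf is first imported; this mirrors the
    # env the parametrized sessions pass to py.test.
    os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "upb"

    from google.protobuf.internal import api_implementation

    # Reports the active backend: "python", "upb", or "cpp".
    print(api_implementation.Type())
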
@@ -304,7 +315,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python="3.9") +@nox.session(python="3.10") def docs(session): """Build the docs for this library.""" @@ -396,10 +407,17 @@ def docfx(session): ) -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def prerelease_deps(session): +@nox.session(python="3.12") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): """Run all tests with prerelease versions of dependencies installed.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + # Install all dependencies session.install("-e", ".[all, tests, tracing]") unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES @@ -434,9 +452,9 @@ def prerelease_deps(session): "protobuf", # dependency of grpc "six", + "grpc-google-iam-v1", "googleapis-common-protos", - # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 - "grpcio!=1.52.0rc1", + "grpcio", "grpcio-status", "google-api-core", "google-auth", @@ -462,12 +480,17 @@ def prerelease_deps(session): session.run("python", "-c", "import grpc; print(grpc.__version__)") session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("py.test", "tests/unit") + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") - env = {} # Only run system tests if found. if os.path.exists(system_test_path): session.run( @@ -475,8 +498,10 @@ def prerelease_deps(session): "--verbose", f"--junitxml=system_{session.python}_sponge_log.xml", system_test_path, - env=env, *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) if os.path.exists(system_test_folder_path): session.run( @@ -484,6 +509,8 @@ def prerelease_deps(session): "--verbose", f"--junitxml=system_{session.python}_sponge_log.xml", system_test_folder_path, - env=env, *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) diff --git a/owlbot.py b/owlbot.py index 3b8149e6..9fcf0e15 100644 --- a/owlbot.py +++ b/owlbot.py @@ -137,33 +137,30 @@ def system(session, disable_grpc): if disable_grpc: env["GOOGLE_CLOUD_DISABLE_GRPC"] = "True" -# Run py.test against the system tests. + # Run py.test against the system tests. """, ) assert 1 == s.replace( "noxfile.py", - """\ - # Only run system tests if found. - """, - """\ - env = {} - # Only run system tests if found. - """, -) - -assert 2 == s.replace( - "noxfile.py", - """system_test_path,\n""", """system_test_path, - env=env,\n""", + \*session.posargs, + \)""", + """system_test_path, + env=env, + *session.posargs, + )""", ) -assert 2 == s.replace( +assert 1 == s.replace( "noxfile.py", - """system_test_folder_path,\n""", """system_test_folder_path, - env=env,\n""", + \*session.posargs, + \)""", + """system_test_folder_path, + env=env, + *session.posargs, + )""", ) # Add nox session to exercise doctests diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh index 0018b421..120b0ddc 100755 --- a/scripts/decrypt-secrets.sh +++ b/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2023 Google LLC All rights reserved. 
+# Copyright 2024 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py index 1acc1198..8f5e248a 100644 --- a/scripts/readme-gen/readme_gen.py +++ b/scripts/readme-gen/readme_gen.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From 4982f9a6cbbe2de449535295a363a2dd49538c86 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Tue, 30 Jul 2024 13:08:14 -0400 Subject: [PATCH 19/24] fix: Using end_cursor instead of skipped_cursor in Iterator to fix rare bug. (#552) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: Using end_cursor instead of skipped_cursor in Iterator to fix rare bug. * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Daniel Sanche --- google/cloud/datastore/query.py | 2 +- tests/unit/test_query.py | 14 +++++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/google/cloud/datastore/query.py b/google/cloud/datastore/query.py index 72d6fe51..c6d27855 100644 --- a/google/cloud/datastore/query.py +++ b/google/cloud/datastore/query.py @@ -825,7 +825,7 @@ def _next_page(self): old_query_pb = query_pb query_pb = query_pb2.Query() query_pb._pb.CopyFrom(old_query_pb._pb) # copy for testability - query_pb.start_cursor = response_pb.batch.skipped_cursor + query_pb.start_cursor = response_pb.batch.end_cursor query_pb.offset -= response_pb.batch.skipped_results request = { diff --git a/tests/unit/test_query.py b/tests/unit/test_query.py index 6c2063bb..fa7d63dc 100644 --- a/tests/unit/test_query.py +++ b/tests/unit/test_query.py @@ -1019,7 +1019,8 @@ def test_iterator__next_page_no_more(database_id): @pytest.mark.parametrize("database_id", [None, "somedb"]) -def test_iterator__next_page_w_skipped_lt_offset(database_id): +@pytest.mark.parametrize("skipped_cursor_1", [b"DEADBEEF", b""]) +def test_iterator__next_page_w_skipped_lt_offset(skipped_cursor_1, database_id): from google.api_core import page_iterator from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore_v1.types import entity as entity_pb2 @@ -1028,16 +1029,17 @@ def test_iterator__next_page_w_skipped_lt_offset(database_id): project = "prujekt" skipped_1 = 100 - skipped_cursor_1 = b"DEADBEEF" + end_cursor_1 = b"DEADBEEF" skipped_2 = 50 - skipped_cursor_2 = b"FACEDACE" + end_cursor_2 = b"FACEDACE" more_enum = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED result_1 = _make_query_response([], b"", more_enum, skipped_1) result_1.batch.skipped_cursor = skipped_cursor_1 + result_1.batch.end_cursor = end_cursor_1 result_2 = _make_query_response([], b"", more_enum, skipped_2) - result_2.batch.skipped_cursor = skipped_cursor_2 + result_2.batch.end_cursor = end_cursor_2 ds_api = _make_datastore_api(result_1, result_2) client = _Client(project, datastore_api=ds_api, database=database_id) @@ -1055,9 +1057,7 @@ def test_iterator__next_page_w_skipped_lt_offset(database_id): read_options = datastore_pb2.ReadOptions() query_1 = query_pb2.Query(offset=offset) - query_2 = query_pb2.Query( 
- start_cursor=skipped_cursor_1, offset=(offset - skipped_1) - ) + query_2 = query_pb2.Query(start_cursor=end_cursor_1, offset=(offset - skipped_1)) expected_calls = [] for query in [query_1, query_2]: expected_request = { From 5e773cb8c766303fef53965dd100b3c4c93b98be Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 5 Aug 2024 11:12:48 -0400 Subject: [PATCH 20/24] fix: Retry and timeout values do not propagate in requests during pagination (#555) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.18.4 PiperOrigin-RevId: 657207628 Source-Link: https://github.com/googleapis/googleapis/commit/33fe71e5a2061402283e0455636a98e5b78eaf7f Source-Link: https://github.com/googleapis/googleapis-gen/commit/e02739d122ed15bd5ef5771c57f12a83d47a1dda Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZTAyNzM5ZDEyMmVkMTViZDVlZjU3NzFjNTdmMTJhODNkNDdhMWRkYSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> --- .../services/datastore_admin/async_client.py | 2 + .../services/datastore_admin/client.py | 2 + .../services/datastore_admin/pagers.py | 41 ++++++++++++++++++- .../test_datastore_admin.py | 7 +++- .../unit/gapic/datastore_v1/test_datastore.py | 1 + 5 files changed, 50 insertions(+), 3 deletions(-) diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index 4b7b0c8d..80fb6bbf 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -1093,6 +1093,8 @@ async def sample_list_indexes(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index e6f35ba3..ce21fdbd 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -1476,6 +1476,8 @@ def sample_list_indexes(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py b/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py index eb4bd0dc..dc61026b 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import ( Any, AsyncIterator, @@ -22,8 +25,18 @@ Tuple, Optional, Iterator, + Union, ) +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index @@ -52,6 +65,8 @@ def __init__( request: datastore_admin.ListIndexesRequest, response: datastore_admin.ListIndexesResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -63,12 +78,17 @@ def __init__( The initial request object. response (google.cloud.datastore_admin_v1.types.ListIndexesResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = datastore_admin.ListIndexesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -79,7 +99,12 @@ def pages(self) -> Iterator[datastore_admin.ListIndexesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[index.Index]: @@ -114,6 +139,8 @@ def __init__( request: datastore_admin.ListIndexesRequest, response: datastore_admin.ListIndexesResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -125,12 +152,17 @@ def __init__( The initial request object. response (google.cloud.datastore_admin_v1.types.ListIndexesResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = datastore_admin.ListIndexesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -141,7 +173,12 @@ async def pages(self) -> AsyncIterator[datastore_admin.ListIndexesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[index.Index]: diff --git a/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index 8e65052c..85922425 100644 --- a/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -47,6 +47,7 @@ from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.datastore_admin_v1.services.datastore_admin import ( @@ -3119,12 +3120,16 @@ def test_list_indexes_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("project_id", ""),)), ) - pager = client.list_indexes(request={}) + pager = client.list_indexes(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 diff --git a/tests/unit/gapic/datastore_v1/test_datastore.py b/tests/unit/gapic/datastore_v1/test_datastore.py index 73f3d837..65289ad3 100644 --- a/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/tests/unit/gapic/datastore_v1/test_datastore.py @@ -43,6 +43,7 @@ from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.datastore_v1.services.datastore import DatastoreAsyncClient From ba2001993b3e6fa59651a0391846e2d3c20d92b2 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Wed, 7 Aug 2024 10:09:18 -0400 Subject: [PATCH 21/24] test: Added system test for query offset issue (#557) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * test: Added system test for query offset issue * linting * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fixed test * Removed testing the default database --------- Co-authored-by: Owl Bot --- tests/system/test_query.py | 34 +++++++++ tests/system/utils/clear_datastore.py | 10 ++- tests/system/utils/populate_datastore.py | 93 +++++++++++++++++++++++- 3 files changed, 132 insertions(+), 5 deletions(-) diff --git a/tests/system/test_query.py b/tests/system/test_query.py index 9f902205..b9574789 100644 --- a/tests/system/test_query.py +++ 
b/tests/system/test_query.py @@ -337,6 +337,17 @@ def large_query_client(datastore_client): return large_query_client +@pytest.fixture(scope="session") +def mergejoin_query_client(datastore_client): + mergejoin_query_client = _helpers.clone_client( + datastore_client, + namespace=populate_datastore.MERGEJOIN_DATASET_NAMESPACE, + ) + populate_datastore.add_mergejoin_dataset_entities(client=mergejoin_query_client) + + return mergejoin_query_client + + @pytest.fixture(scope="function") def large_query(large_query_client): # Use the client for this test instead of the global. @@ -346,6 +357,15 @@ def large_query(large_query_client): ) +@pytest.fixture(scope="function") +def mergejoin_query(mergejoin_query_client): + # Use the client for this test instead of the global. + return mergejoin_query_client.query( + kind=populate_datastore.MERGEJOIN_DATASET_KIND, + namespace=populate_datastore.MERGEJOIN_DATASET_NAMESPACE, + ) + + @pytest.mark.parametrize( "limit,offset,expected", [ @@ -385,6 +405,20 @@ def test_large_query(large_query, limit, offset, expected, database_id): assert len(entities) == expected +@pytest.mark.parametrize("database_id", [_helpers.TEST_DATABASE], indirect=True) +def test_mergejoin_query(mergejoin_query, database_id): + query = mergejoin_query + query.add_filter(filter=PropertyFilter("a", "=", 1)) + query.add_filter(filter=PropertyFilter("b", "=", 1)) + + # There should be 2 * MERGEJOIN_QUERY_NUM_RESULTS results total + expected_total = 2 * populate_datastore.MERGEJOIN_QUERY_NUM_RESULTS + for offset in range(0, expected_total + 1): + iterator = query.fetch(offset=offset) + num_entities = len([e for e in iterator]) + assert num_entities == expected_total - offset + + @pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) def test_query_add_property_filter(ancestor_query, database_id): query = ancestor_query diff --git a/tests/system/utils/clear_datastore.py b/tests/system/utils/clear_datastore.py index 2082bce7..05a63b31 100644 --- a/tests/system/utils/clear_datastore.py +++ b/tests/system/utils/clear_datastore.py @@ -31,6 +31,8 @@ "Post", "uuid_key", "timestamp_key", + "LargeCharacter", + "Mergejoin", ) TRANSACTION_MAX_GROUPS = 5 MAX_DEL_ENTITIES = 500 @@ -90,12 +92,10 @@ def remove_all_entities(client): def run(database): - client = datastore.Client(database=database) kinds = sys.argv[1:] if len(kinds) == 0: kinds = ALL_KINDS - print_func( "This command will remove all entities from the database " + database @@ -105,8 +105,10 @@ def run(database): response = input("Is this OK [y/n]? 
") if response.lower() == "y": - for kind in kinds: - remove_kind(kind, client) + for namespace in ["", "LargeCharacterEntity", "MergejoinNamespace"]: + client = datastore.Client(database=database, namespace=namespace) + for kind in kinds: + remove_kind(kind, client) else: print_func("Doing nothing.") diff --git a/tests/system/utils/populate_datastore.py b/tests/system/utils/populate_datastore.py index 9077241f..0eea15fb 100644 --- a/tests/system/utils/populate_datastore.py +++ b/tests/system/utils/populate_datastore.py @@ -58,6 +58,11 @@ LARGE_CHARACTER_NAMESPACE = "LargeCharacterEntity" LARGE_CHARACTER_KIND = "LargeCharacter" +MERGEJOIN_QUERY_NUM_RESULTS = 7 +MERGEJOIN_DATASET_INTERMEDIATE_OBJECTS = 20000 +MERGEJOIN_DATASET_NAMESPACE = "MergejoinNamespace" +MERGEJOIN_DATASET_KIND = "Mergejoin" + def get_system_test_db(): return os.getenv("SYSTEM_TESTS_DATABASE") or "system-tests-named-db" @@ -179,12 +184,92 @@ def add_timestamp_keys(client=None): batch.put(entity) +def add_mergejoin_dataset_entities(client=None): + """ + Dataset to account for one bug that was seen in https://github.com/googleapis/python-datastore/issues/547 + The root cause of this is us setting a subsequent query's start_cursor to skipped_cursor instead of end_cursor. + In niche scenarios involving mergejoins, skipped_cursor becomes empty and the query starts back from the beginning, + returning duplicate items. + + This bug is able to be reproduced with a dataset shown in b/352377540, with 7 items of a=1, b=1 + followed by 20k items of alternating a=1, b=0 and a=0, b=1, then 7 more a=1, b=1, then querying for all + items with a=1, b=1 and an offset of 8. + """ + client.namespace = MERGEJOIN_DATASET_NAMESPACE + + # Query used for all tests + page_query = client.query( + kind=MERGEJOIN_DATASET_KIND, namespace=MERGEJOIN_DATASET_NAMESPACE + ) + + def create_entity(id, a, b): + key = client.key(MERGEJOIN_DATASET_KIND, id) + entity = datastore.Entity(key=key) + entity["a"] = a + entity["b"] = b + return entity + + def put_objects(count): + id = 1 + curr_intermediate_entries = 0 + + # Can only do 500 operations in a transaction with an overall + # size limit. + ENTITIES_TO_BATCH = 500 + + with client.transaction() as xact: + for _ in range(0, MERGEJOIN_QUERY_NUM_RESULTS): + entity = create_entity(id, 1, 1) + id += 1 + xact.put(entity) + + while curr_intermediate_entries < count - MERGEJOIN_QUERY_NUM_RESULTS: + start = curr_intermediate_entries + end = min(curr_intermediate_entries + ENTITIES_TO_BATCH, count) + with client.transaction() as xact: + # The name/ID for the new entity + for i in range(start, end): + if id % 2: + entity = create_entity(id, 0, 1) + else: + entity = create_entity(id, 1, 0) + id += 1 + + # Saves the entity + xact.put(entity) + curr_intermediate_entries += ENTITIES_TO_BATCH + + with client.transaction() as xact: + for _ in range(0, MERGEJOIN_QUERY_NUM_RESULTS): + entity = create_entity(id, 1, 1) + id += 1 + xact.put(entity) + + # If anything exists in this namespace, delete it, since we need to + # set up something very specific. 
+ all_entities = [e for e in page_query.fetch()] + if len(all_entities) > 0: + # Cleanup Collection if not an exact match + while all_entities: + entities = all_entities[:500] + all_entities = all_entities[500:] + client.delete_multi([e.key for e in entities]) + # Put objects + put_objects(MERGEJOIN_DATASET_INTERMEDIATE_OBJECTS) + + def run(database): client = datastore.Client(database=database) flags = sys.argv[1:] if len(flags) == 0: - flags = ["--characters", "--uuid", "--timestamps"] + flags = [ + "--characters", + "--uuid", + "--timestamps", + "--large-characters", + "--mergejoin", + ] if "--characters" in flags: add_characters(client) @@ -195,6 +280,12 @@ def run(database): if "--timestamps" in flags: add_timestamp_keys(client) + if "--large-characters" in flags: + add_large_character_entities(client) + + if "--mergejoin" in flags: + add_mergejoin_dataset_entities(client) + def main(): for database in ["", get_system_test_db()]: From 1500f7007f251256ce2923e1168439d40d41cc4d Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 7 Aug 2024 14:57:47 -0600 Subject: [PATCH 22/24] feat: implement query profiling (#542) --- google/cloud/datastore/__init__.py | 12 +- google/cloud/datastore/aggregation.py | 99 ++++++++---- google/cloud/datastore/client.py | 4 +- google/cloud/datastore/query.py | 93 ++++++++---- google/cloud/datastore/query_profile.py | 143 ++++++++++++++++++ samples/snippets/snippets.py | 110 +++++++++++++- samples/snippets/snippets_test.py | 72 ++++++--- tests/system/test_aggregation_query.py | 141 +++++++++++++++++ tests/system/test_query.py | 124 ++++++++++++++- tests/unit/test_aggregation.py | 193 +++++++++++++++++++++++- tests/unit/test_query.py | 153 +++++++++++++++++++ tests/unit/test_query_profile.py | 126 ++++++++++++++++ 12 files changed, 1170 insertions(+), 100 deletions(-) create mode 100644 google/cloud/datastore/query_profile.py create mode 100644 tests/unit/test_query_profile.py diff --git a/google/cloud/datastore/__init__.py b/google/cloud/datastore/__init__.py index b2b4c172..d6fa310b 100644 --- a/google/cloud/datastore/__init__.py +++ b/google/cloud/datastore/__init__.py @@ -61,6 +61,16 @@ from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key from google.cloud.datastore.query import Query +from google.cloud.datastore.query_profile import ExplainOptions from google.cloud.datastore.transaction import Transaction -__all__ = ["__version__", "Batch", "Client", "Entity", "Key", "Query", "Transaction"] +__all__ = [ + "__version__", + "Batch", + "Client", + "Entity", + "Key", + "Query", + "ExplainOptions", + "Transaction", +] diff --git a/google/cloud/datastore/aggregation.py b/google/cloud/datastore/aggregation.py index 1384f332..dcb81236 100644 --- a/google/cloud/datastore/aggregation.py +++ b/google/cloud/datastore/aggregation.py @@ -23,15 +23,11 @@ from google.cloud.datastore import helpers from google.cloud.datastore.query import _pb_from_query +from google.cloud.datastore.query_profile import ExplainMetrics +from google.cloud.datastore.query_profile import QueryExplainError -_NOT_FINISHED = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED -_NO_MORE_RESULTS = query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS - -_FINISHED = ( - _NO_MORE_RESULTS, - query_pb2.QueryResultBatch.MoreResultsType.MORE_RESULTS_AFTER_LIMIT, - query_pb2.QueryResultBatch.MoreResultsType.MORE_RESULTS_AFTER_CURSOR, -) +from google.cloud.datastore.query import _NOT_FINISHED +from google.cloud.datastore.query import _FINISHED class 
BaseAggregation(ABC): @@ -159,16 +155,25 @@ class AggregationQuery(object): :type query: :class:`google.cloud.datastore.query.Query` :param query: The query used for aggregations. + + :type explain_options: :class:`~google.cloud.datastore.ExplainOptions` + :param explain_options: (Optional) Options to enable query profiling for + this query. When set, explain_metrics will be available on the iterator + returned by query.fetch(). + If not passed, will use value from given query. """ def __init__( self, client, query, + explain_options=None, ): self._client = client self._nested_query = query self._aggregations = [] + # fallback to query._explain_options if not set + self._explain_options = explain_options or query._explain_options @property def project(self): @@ -391,6 +396,7 @@ def __init__( self._read_time = read_time self._limit = limit # The attributes below will change over the life of the iterator. + self._explain_metrics = None self._more_results = True def _build_protobuf(self): @@ -441,7 +447,6 @@ def _next_page(self): if not self._more_results: return None - query_pb = self._build_protobuf() transaction_id, new_transaction_options = helpers.get_transaction_options( self.client.current_transaction ) @@ -466,38 +471,68 @@ def _next_page(self): "project_id": self._aggregation_query.project, "partition_id": partition_id, "read_options": read_options, - "aggregation_query": query_pb, + "aggregation_query": self._build_protobuf(), } + if self._aggregation_query._explain_options: + request[ + "explain_options" + ] = self._aggregation_query._explain_options._to_dict() helpers.set_database_id_to_request(request, self.client.database) - response_pb = self.client._datastore_api.run_aggregation_query( - request=request, - **kwargs, - ) - while response_pb.batch.more_results == _NOT_FINISHED: - # We haven't finished processing. A likely reason is we haven't - # skipped all of the results yet. Don't return any results. - # Instead, rerun query, adjusting offsets. Datastore doesn't process - # more than 1000 skipped results in a query. - old_query_pb = query_pb - query_pb = query_pb2.AggregationQuery() - query_pb._pb.CopyFrom(old_query_pb._pb) # copy for testability - - request = { - "project_id": self._aggregation_query.project, - "partition_id": partition_id, - "read_options": read_options, - "aggregation_query": query_pb, - } - helpers.set_database_id_to_request(request, self.client.database) + response_pb = None + + while response_pb is None or response_pb.batch.more_results == _NOT_FINISHED: + if response_pb is not None: + # We haven't finished processing. A likely reason is we haven't + # skipped all of the results yet. Don't return any results. + # Instead, rerun query, adjusting offsets. Datastore doesn't process + # more than 1000 skipped results in a query. 
+ new_query_pb = query_pb2.AggregationQuery() + new_query_pb._pb.CopyFrom( + request["aggregation_query"]._pb + ) # copy for testability + request["aggregation_query"] = new_query_pb + response_pb = self.client._datastore_api.run_aggregation_query( - request=request, - **kwargs, + request=request.copy(), **kwargs ) + # capture explain metrics if present in response + # should only be present in last response, and only if explain_options was set + if response_pb.explain_metrics: + self._explain_metrics = ExplainMetrics._from_pb( + response_pb.explain_metrics + ) item_pbs = self._process_query_results(response_pb) return page_iterator.Page(self, item_pbs, self.item_to_value) + @property + def explain_metrics(self) -> ExplainMetrics: + """ + Get the metrics associated with the query execution. + Metrics are only available when explain_options is set on the query. If + ExplainOptions.analyze is False, only plan_summary is available. If it is + True, execution_stats is also available. + + :rtype: :class:`~google.cloud.datastore.query_profile.ExplainMetrics` + :returns: The metrics associated with the query execution. + :raises: :class:`~google.cloud.datastore.query_profile.QueryExplainError` + if explain_metrics is not available on the query. + """ + if self._explain_metrics is not None: + return self._explain_metrics + elif self._aggregation_query._explain_options is None: + raise QueryExplainError("explain_options not set on query.") + elif self._aggregation_query._explain_options.analyze is False: + # we need to run the query to get the explain_metrics + # analyze=False only returns explain_metrics, no results + self._next_page() + if self._explain_metrics is not None: + return self._explain_metrics + raise QueryExplainError( + "explain_metrics not available until query is complete." + ) + # pylint: disable=unused-argument def _item_to_aggregation_result(iterator, pb): diff --git a/google/cloud/datastore/client.py b/google/cloud/datastore/client.py index b1e79d91..ca3d4e0c 100644 --- a/google/cloud/datastore/client.py +++ b/google/cloud/datastore/client.py @@ -875,7 +875,7 @@ def do_something_with(entity): kwargs["namespace"] = self.namespace return Query(self, **kwargs) - def aggregation_query(self, query): + def aggregation_query(self, query, **kwargs): """Proxy to :class:`google.cloud.datastore.aggregation.AggregationQuery`. Using aggregation_query to count over a query: @@ -953,7 +953,7 @@ def do_something_with(entity): :rtype: :class:`~google.cloud.datastore.aggregation.AggregationQuery` :returns: An AggregationQuery object. """ - return AggregationQuery(self, query) + return AggregationQuery(self, query, **kwargs) def reserve_ids_sequential(self, complete_key, num_ids, retry=None, timeout=None): """Reserve a list of IDs sequentially from a complete key. diff --git a/google/cloud/datastore/query.py b/google/cloud/datastore/query.py index c6d27855..5ff27366 100644 --- a/google/cloud/datastore/query.py +++ b/google/cloud/datastore/query.py @@ -13,20 +13,21 @@ # limitations under the License. 
"""Create / interact with Google Cloud Datastore queries.""" - import base64 import warnings - from google.api_core import page_iterator from google.cloud._helpers import _ensure_tuple_or_list - from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore_v1.types import query as query_pb2 from google.cloud.datastore import helpers from google.cloud.datastore.key import Key + +from google.cloud.datastore.query_profile import ExplainMetrics +from google.cloud.datastore.query_profile import QueryExplainError + import abc from abc import ABC @@ -38,6 +39,7 @@ _NO_MORE_RESULTS, query_pb2.QueryResultBatch.MoreResultsType.MORE_RESULTS_AFTER_LIMIT, query_pb2.QueryResultBatch.MoreResultsType.MORE_RESULTS_AFTER_CURSOR, + query_pb2.QueryResultBatch.MoreResultsType.MORE_RESULTS_TYPE_UNSPECIFIED, # received when explain_options(analyze=False) ) KEY_PROPERTY_NAME = "__key__" @@ -176,6 +178,11 @@ class Query(object): :type distinct_on: sequence of string :param distinct_on: field names used to group query results. + :type explain_options: :class:`~google.cloud.datastore.ExplainOptions` + :param explain_options: (Optional) Options to enable query profiling for + this query. When set, explain_metrics will be available on the iterator + returned by query.fetch(). + :raises: ValueError if ``project`` is not passed and no implicit default is set. """ @@ -203,6 +210,7 @@ def __init__( projection=(), order=(), distinct_on=(), + explain_options=None, ): self._client = client self._kind = kind @@ -221,6 +229,7 @@ def __init__( else: self._namespace = None + self._explain_options = explain_options self._ancestor = ancestor self._filters = [] @@ -704,6 +713,7 @@ def __init__( self._timeout = timeout self._read_time = read_time # The attributes below will change over the life of the iterator. + self._explain_metrics = None self._more_results = True self._skipped_results = 0 @@ -777,7 +787,6 @@ def _next_page(self): if not self._more_results: return None - query_pb = self._build_protobuf() new_transaction_options = None transaction_id, new_transaction_options = helpers.get_transaction_options( self.client.current_transaction @@ -804,46 +813,70 @@ def _next_page(self): "project_id": self._query.project, "partition_id": partition_id, "read_options": read_options, - "query": query_pb, + "query": self._build_protobuf(), } + if self._query._explain_options: + request["explain_options"] = self._query._explain_options._to_dict() helpers.set_database_id_to_request(request, self.client.database) - response_pb = self.client._datastore_api.run_query( - request=request, - **kwargs, - ) + response_pb = None - while ( + while response_pb is None or ( response_pb.batch.more_results == _NOT_FINISHED - and response_pb.batch.skipped_results < query_pb.offset + and response_pb.batch.skipped_results < request["query"].offset ): - # We haven't finished processing. A likely reason is we haven't - # skipped all of the results yet. Don't return any results. - # Instead, rerun query, adjusting offsets. Datastore doesn't process - # more than 1000 skipped results in a query. 
- old_query_pb = query_pb - query_pb = query_pb2.Query() - query_pb._pb.CopyFrom(old_query_pb._pb) # copy for testability - query_pb.start_cursor = response_pb.batch.end_cursor - query_pb.offset -= response_pb.batch.skipped_results - - request = { - "project_id": self._query.project, - "partition_id": partition_id, - "read_options": read_options, - "query": query_pb, - } - helpers.set_database_id_to_request(request, self.client.database) + if response_pb is not None: + # We haven't finished processing. A likely reason is we haven't + # skipped all of the results yet. Don't return any results. + # Instead, rerun query, adjusting offsets. Datastore doesn't process + # more than 1000 skipped results in a query. + new_query_pb = query_pb2.Query() + new_query_pb._pb.CopyFrom(request["query"]._pb) # copy for testability + new_query_pb.start_cursor = response_pb.batch.end_cursor + new_query_pb.offset -= response_pb.batch.skipped_results + request["query"] = new_query_pb response_pb = self.client._datastore_api.run_query( - request=request, - **kwargs, + request=request.copy(), **kwargs ) + # capture explain metrics if present in response + # should only be present in last response, and only if explain_options was set + if response_pb and response_pb.explain_metrics: + self._explain_metrics = ExplainMetrics._from_pb( + response_pb.explain_metrics + ) entity_pbs = self._process_query_results(response_pb) return page_iterator.Page(self, entity_pbs, self.item_to_value) + @property + def explain_metrics(self) -> ExplainMetrics: + """ + Get the metrics associated with the query execution. + Metrics are only available when explain_options is set on the query. If + ExplainOptions.analyze is False, only plan_summary is available. If it is + True, execution_stats is also available. + + :rtype: :class:`~google.cloud.datastore.query_profile.ExplainMetrics` + :returns: The metrics associated with the query execution. + :raises: :class:`~google.cloud.datastore.query_profile.QueryExplainError` + if explain_metrics is not available on the query. + """ + if self._explain_metrics is not None: + return self._explain_metrics + elif self._query._explain_options is None: + raise QueryExplainError("explain_options not set on query.") + elif self._query._explain_options.analyze is False: + # we need to run the query to get the explain_metrics + # analyze=False only returns explain_metrics, no results + self._next_page() + if self._explain_metrics is not None: + return self._explain_metrics + raise QueryExplainError( + "explain_metrics not available until query is complete." + ) + def _pb_from_query(query): """Convert a Query instance to the corresponding protobuf. diff --git a/google/cloud/datastore/query_profile.py b/google/cloud/datastore/query_profile.py new file mode 100644 index 00000000..35c31cb3 --- /dev/null +++ b/google/cloud/datastore/query_profile.py @@ -0,0 +1,143 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
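+
+"""Helper types for configuring query profiling and reading its results."""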
+from __future__ import annotations + +from typing import Any + +import datetime + +from dataclasses import dataclass +from google.protobuf.json_format import MessageToDict + + +@dataclass(frozen=True) +class ExplainOptions: + """ + Class used to configure query profiling on a query. + Set on a query object using the explain_options attribute at query construction time. + + :type analyze: bool + :param analyze: Whether to execute this query. When false (the default), + the query will be planned, returning only metrics from the planning stages. + When true, the query will be planned and executed, returning the full + query results along with both planning and execution stage metrics. + """ + + analyze: bool = False + + def _to_dict(self): + return {"analyze": self.analyze} + + +@dataclass(frozen=True) +class PlanSummary: + """ + Contains planning phase information about a query. + + :type indexes_used: list[dict[str, Any]] + :param indexes_used: The indexes selected for this query. + """ + + indexes_used: list[dict[str, Any]] + + +@dataclass(frozen=True) +class ExecutionStats: + """ + Execution phase information about a query. + + Only available when explain_options.analyze is True. + + :type results_returned: int + :param results_returned: Total number of results returned, including + documents, projections, aggregation results, keys. + :type execution_duration: datetime.timedelta + :param execution_duration: Total time to execute the query in the backend. + :type read_operations: int + :param read_operations: Total billable read operations. + :type debug_stats: dict[str, Any] + :param debug_stats: Debugging statistics from the execution of the query. + Note that the debugging stats are subject to change as Firestore evolves + """ + + results_returned: int + execution_duration: datetime.timedelta + read_operations: int + debug_stats: dict[str, Any] + + +@dataclass(frozen=True) +class ExplainMetrics: + """ + ExplainMetrics contains information about the planning and execution of a query. + + When explain_options.analyze is false, only plan_summary is available. + When explain_options.analyze is true, execution_stats is also available. + + :type plan_summary: PlanSummary + :param plan_summary: Planning phase information about the query. + :type execution_stats: ExecutionStats + :param execution_stats: Execution phase information about the query. + """ + + plan_summary: PlanSummary + + @staticmethod + def _from_pb(metrics_pb): + dict_repr = MessageToDict(metrics_pb._pb, preserving_proto_field_name=True) + plan_summary = PlanSummary( + indexes_used=dict_repr.get("plan_summary", {}).get("indexes_used", []) + ) + if "execution_stats" in dict_repr: + stats_dict = dict_repr.get("execution_stats", {}) + execution_stats = ExecutionStats( + results_returned=int(stats_dict.get("results_returned", 0)), + execution_duration=metrics_pb.execution_stats.execution_duration, + read_operations=int(stats_dict.get("read_operations", 0)), + debug_stats=stats_dict.get("debug_stats", {}), + ) + return _ExplainAnalyzeMetrics( + plan_summary=plan_summary, _execution_stats=execution_stats + ) + else: + return ExplainMetrics(plan_summary=plan_summary) + + @property + def execution_stats(self) -> ExecutionStats: + raise QueryExplainError( + "execution_stats not available when explain_options.analyze=False." + ) + + +@dataclass(frozen=True) +class _ExplainAnalyzeMetrics(ExplainMetrics): + """ + Subclass of ExplainMetrics that includes execution_stats. + Only available when explain_options.analyze is True. 
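+ Instances are created by ExplainMetrics._from_pb when execution stats
+ are present in the response.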
+ """ + + plan_summary: PlanSummary + _execution_stats: ExecutionStats + + @property + def execution_stats(self) -> ExecutionStats: + return self._execution_stats + + +class QueryExplainError(Exception): + """ + Error returned when there is a problem accessing query profiling information. + """ + + pass diff --git a/samples/snippets/snippets.py b/samples/snippets/snippets.py index 749a1ce4..1b86ba8b 100644 --- a/samples/snippets/snippets.py +++ b/samples/snippets/snippets.py @@ -278,15 +278,16 @@ def sum_query_property_filter(client): # Execute sum aggregation query with filters completed_tasks = client.query(kind="Task").add_filter("done", "=", True) completed_tasks_query = client.aggregation_query(query=completed_tasks).sum( - property_ref="hours", - alias="total_completed_sum_hours" + property_ref="hours", alias="total_completed_sum_hours" ) completed_query_result = completed_tasks_query.fetch() for aggregation_results in completed_query_result: for aggregation_result in aggregation_results: if aggregation_result.alias == "total_completed_sum_hours": - print(f"Total sum of hours in completed tasks is {aggregation_result.value}") + print( + f"Total sum of hours in completed tasks is {aggregation_result.value}" + ) # [END datastore_sum_aggregation_query_with_filters] return tasks @@ -339,15 +340,16 @@ def avg_query_property_filter(client): # Execute average aggregation query with filters completed_tasks = client.query(kind="Task").add_filter("done", "=", True) completed_tasks_query = client.aggregation_query(query=completed_tasks).avg( - property_ref="hours", - alias="total_completed_avg_hours" + property_ref="hours", alias="total_completed_avg_hours" ) completed_query_result = completed_tasks_query.fetch() for aggregation_results in completed_query_result: for aggregation_result in aggregation_results: if aggregation_result.alias == "total_completed_avg_hours": - print(f"Total average of hours in completed tasks is {aggregation_result.value}") + print( + f"Total average of hours in completed tasks is {aggregation_result.value}" + ) # [END datastore_avg_aggregation_query_with_filters] return tasks @@ -375,9 +377,11 @@ def multiple_aggregations_query(client): [ datastore.aggregation.CountAggregation(alias="count_aggregation"), datastore.aggregation.SumAggregation( - property_ref="hours", alias="sum_aggregation"), + property_ref="hours", alias="sum_aggregation" + ), datastore.aggregation.AvgAggregation( - property_ref="hours", alias="avg_aggregation") + property_ref="hours", alias="avg_aggregation" + ), ] ) @@ -389,6 +393,96 @@ def multiple_aggregations_query(client): return tasks +def explain_analyze_entity(client): + # [START datastore_query_explain_analyze_entity] + # Build the query with explain_options + # analzye = true to get back the query stats, plan info, and query results + query = client.query( + kind="Task", explain_options=datastore.ExplainOptions(analyze=True) + ) + + # initiate the query + iterator = query.fetch() + + # explain_metrics is only available after query is completed + for task_result in iterator: + print(task_result) + + # get the plan summary + plan_summary = iterator.explain_metrics.plan_summary + print(f"Indexes used: {plan_summary.indexes_used}") + + # get the execution stats + execution_stats = iterator.explain_metrics.execution_stats + print(f"Results returned: {execution_stats.results_returned}") + print(f"Execution duration: {execution_stats.execution_duration}") + print(f"Read operations: {execution_stats.read_operations}") + print(f"Debug stats: 
{execution_stats.debug_stats}") + # [END datastore_query_explain_analyze_entity] + + +def explain_entity(client): + # [START datastore_query_explain_entity] + # Build the query with explain_options + # by default (analyze = false), only plan_summary property is available + query = client.query(kind="Task", explain_options=datastore.ExplainOptions()) + + # initiate the query + iterator = query.fetch() + + # get the plan summary + plan_summary = iterator.explain_metrics.plan_summary + print(f"Indexes used: {plan_summary.indexes_used}") + # [END datastore_query_explain_entity] + + +def explain_analyze_aggregation(client): + # [START datastore_query_explain_analyze_aggregation] + # Build the aggregation query with explain_options + # analzye = true to get back the query stats, plan info, and query results + all_tasks_query = client.query(kind="Task") + count_query = client.aggregation_query( + all_tasks_query, explain_options=datastore.ExplainOptions(analyze=True) + ).count() + + # initiate the query + iterator = count_query.fetch() + + # explain_metrics is only available after query is completed + for task_result in iterator: + print(task_result) + + # get the plan summary + plan_summary = iterator.explain_metrics.plan_summary + print(f"Indexes used: {plan_summary.indexes_used}") + + # get the execution stats + execution_stats = iterator.explain_metrics.execution_stats + print(f"Results returned: {execution_stats.results_returned}") + print(f"Execution duration: {execution_stats.execution_duration}") + print(f"Read operations: {execution_stats.read_operations}") + print(f"Debug stats: {execution_stats.debug_stats}") + # [END datastore_query_explain_analyze_aggregation] + + +def explain_aggregation(client): + # [START datastore_query_explain_aggregation] + # Build the aggregation query with explain_options + # by default (analyze = false), only plan_summary property is available + all_tasks_query = client.query(kind="Task") + count_query = client.aggregation_query( + all_tasks_query, explain_options=datastore.ExplainOptions() + ).count() + + # initiate the query + iterator = count_query.fetch() + + # get the plan summary + plan_summary = iterator.explain_metrics.plan_summary + print(f"Indexes used: {plan_summary.indexes_used}") + # [END datastore_query_explain_aggregation] + + def main(project_id): client = datastore.Client(project_id) diff --git a/samples/snippets/snippets_test.py b/samples/snippets/snippets_test.py index 92db0507..ae3b2948 100644 --- a/samples/snippets/snippets_test.py +++ b/samples/snippets/snippets_test.py @@ -52,17 +52,15 @@ def setup_indexes(request): indexes = [] done_property_index = datastore_admin_v1.Index.IndexedProperty( - name='done', - direction=datastore_admin_v1.Index.Direction.ASCENDING + name="done", direction=datastore_admin_v1.Index.Direction.ASCENDING ) hour_property_index = datastore_admin_v1.Index.IndexedProperty( - name='hours', - direction=datastore_admin_v1.Index.Direction.ASCENDING + name="hours", direction=datastore_admin_v1.Index.Direction.ASCENDING ) done_hour_index = datastore_admin_v1.Index( - kind='Task', + kind="Task", ancestor=datastore_admin_v1.Index.AncestorMode.NONE, - properties=[done_property_index, hour_property_index] + properties=[done_property_index, hour_property_index], ) indexes.append(done_hour_index) @@ -157,9 +155,7 @@ def test_count_query_with_stale_read(self, capsys, client): def test_sum_query_on_kind(self, capsys, client): tasks = snippets.sum_query_on_kind(client) captured = capsys.readouterr() - assert ( - 
captured.out.strip() == "Total sum of hours in tasks is 9" - ) + assert captured.out.strip() == "Total sum of hours in tasks is 9" assert captured.err == "" client.entities_to_delete.extend(tasks) @@ -168,9 +164,7 @@ def test_sum_query_on_kind(self, capsys, client): def test_sum_query_property_filter(self, capsys, client): tasks = snippets.sum_query_property_filter(client) captured = capsys.readouterr() - assert ( - captured.out.strip() == "Total sum of hours in completed tasks is 8" - ) + assert captured.out.strip() == "Total sum of hours in completed tasks is 8" assert captured.err == "" client.entities_to_delete.extend(tasks) @@ -179,9 +173,7 @@ def test_sum_query_property_filter(self, capsys, client): def test_avg_query_on_kind(self, capsys, client): tasks = snippets.avg_query_on_kind(client) captured = capsys.readouterr() - assert ( - captured.out.strip() == "Total average of hours in tasks is 3.0" - ) + assert captured.out.strip() == "Total average of hours in tasks is 3.0" assert captured.err == "" client.entities_to_delete.extend(tasks) @@ -201,15 +193,57 @@ def test_avg_query_property_filter(self, capsys, client): def test_multiple_aggregations_query(self, capsys, client): tasks = snippets.multiple_aggregations_query(client) captured = capsys.readouterr() + assert "avg_aggregation value is 3.0" in captured.out + assert "count_aggregation value is 3" in captured.out + assert "sum_aggregation value is 9" in captured.out + assert captured.err == "" + + client.entities_to_delete.extend(tasks) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_explain_analyze_entity(self, capsys, client): + snippets.explain_analyze_entity(client) + captured = capsys.readouterr() assert ( - 'avg_aggregation value is 3.0' in captured.out + "Indexes used: [{'properties': '(__name__ ASC)', 'query_scope': 'Collection group'}]" + in captured.out ) + assert "Results returned: 0" in captured.out + assert "Execution duration: 0:00" in captured.out + assert "Read operations: 0" in captured.out + assert "Debug stats: {" in captured.out + assert captured.err == "" + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_explain_entity(self, capsys, client): + snippets.explain_entity(client) + captured = capsys.readouterr() assert ( - 'count_aggregation value is 3' in captured.out + "Indexes used: [{'properties': '(__name__ ASC)', 'query_scope': 'Collection group'}]" + in captured.out ) + assert captured.err == "" + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_explain_analyze_aggregation(self, capsys, client): + snippets.explain_analyze_aggregation(client) + captured = capsys.readouterr() assert ( - 'sum_aggregation value is 9' in captured.out + "Indexes used: [{'properties': '(__name__ ASC)', 'query_scope': 'Collection group'}]" + in captured.out ) + assert "Results returned: 1" in captured.out + assert "Execution duration: 0:00" in captured.out + assert "Read operations: 1" in captured.out + assert "Debug stats: {" in captured.out assert captured.err == "" - client.entities_to_delete.extend(tasks) + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_explain_aggregation(self, capsys, client): + snippets.explain_aggregation(client) + captured = capsys.readouterr() + assert ( + "Indexes used: [{'properties': '(__name__ ASC)', 'query_scope': 'Collection group'}]" + in captured.out + ) + assert captured.err == "" diff --git a/tests/system/test_aggregation_query.py b/tests/system/test_aggregation_query.py 
index ae175d80..14588fde 100644 --- a/tests/system/test_aggregation_query.py +++ b/tests/system/test_aggregation_query.py @@ -530,3 +530,144 @@ def test_aggregation_query_with_nested_query_multiple_filters( ) assert result_dict["sum_appearances"].value == expected_sum assert result_dict["avg_appearances"].value == expected_sum / expected_matches + + +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_aggregation_query_no_explain( + aggregation_query_client, nested_query, database_id +): + """ + When explain_options is not set, iterator.explain_metrics should raise an exception + """ + from google.cloud.datastore.query_profile import QueryExplainError + + expected_error = "explain_options not set on query" + + agg_query = aggregation_query_client.aggregation_query( + nested_query, explain_options=None + ) + agg_query.count() + agg_query.sum("appearances") + agg_query.avg("appearances") + iterator = agg_query.fetch() + with pytest.raises(QueryExplainError) as excinfo: + iterator.explain_metrics + assert expected_error in str(excinfo.value) + # exhaust the iterator and try again + list(iterator) + with pytest.raises(QueryExplainError) as excinfo: + iterator.explain_metrics + assert expected_error in str(excinfo.value) + + +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_aggregation_query_explain(aggregation_query_client, nested_query, database_id): + """ + When explain_options(analyze=False) is set, iterator should contain explain_metrics field + with plan_summary but no execution_stats + """ + from google.cloud.datastore.query_profile import QueryExplainError + from google.cloud.datastore.query_profile import ExplainOptions + from google.cloud.datastore.query_profile import ExplainMetrics + from google.cloud.datastore.query_profile import PlanSummary + + agg_query = aggregation_query_client.aggregation_query( + nested_query, explain_options=ExplainOptions(analyze=False) + ) + agg_query.count() + agg_query.sum("appearances") + agg_query.avg("appearances") + iterator = agg_query.fetch() + # should have plan_summary but no execution_stats + stats = iterator.explain_metrics + assert isinstance(stats, ExplainMetrics) + assert isinstance(stats.plan_summary, PlanSummary) + assert len(stats.plan_summary.indexes_used) > 0 + # execution_stats should not be present + with pytest.raises(QueryExplainError) as excinfo: + stats.execution_stats + assert "execution_stats not available" in str(excinfo.value) + # should have no results + assert len(list(iterator)) == 0 + + +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_aggregation_query_explain_analyze( + aggregation_query_client, nested_query, database_id +): + """ + When explain_options(analyze=True) is set, iterator should contain explain_metrics field + with plan_summary and execution_stats + + Should not be present until iterator is exhausted + """ + from google.cloud.datastore.query_profile import QueryExplainError + from google.cloud.datastore.query_profile import ExplainOptions + from google.cloud.datastore.query_profile import ExplainMetrics + from google.cloud.datastore.query_profile import ExecutionStats + from google.cloud.datastore.query_profile import PlanSummary + + expected_error = "explain_metrics not available until query is complete." 
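+ # with analyze=True, the backend attaches explain_metrics only to the
+ # final response, so they cannot be read until the iterator is exhausted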
+ agg_query = aggregation_query_client.aggregation_query( + nested_query, explain_options=ExplainOptions(analyze=True) + ) + agg_query.count() + agg_query.sum("appearances") + agg_query.avg("appearances") + iterator = agg_query.fetch() + # explain_metrics isn't present until iterator is exhausted + with pytest.raises(QueryExplainError) as excinfo: + iterator.explain_metrics + assert expected_error in str(excinfo.value) + # exhaust the iterator + results = list(iterator) + num_results = len(results) + assert num_results > 0 + stats = iterator.explain_metrics + assert isinstance(stats, ExplainMetrics) + # verify plan_summary + assert isinstance(stats.plan_summary, PlanSummary) + assert len(stats.plan_summary.indexes_used) > 0 + assert ( + stats.plan_summary.indexes_used[0]["properties"] + == "(appearances ASC, __name__ ASC)" + ) + assert stats.plan_summary.indexes_used[0]["query_scope"] == "Includes ancestors" + # verify execution_stats + assert isinstance(stats.execution_stats, ExecutionStats) + assert stats.execution_stats.results_returned == num_results + assert stats.execution_stats.read_operations == num_results + duration = stats.execution_stats.execution_duration.total_seconds() + assert duration > 0 + assert duration < 1 # we expect a number closer to 0.05 + assert isinstance(stats.execution_stats.debug_stats, dict) + assert "billing_details" in stats.execution_stats.debug_stats + assert "documents_scanned" in stats.execution_stats.debug_stats + assert "index_entries_scanned" in stats.execution_stats.debug_stats + assert len(stats.execution_stats.debug_stats) > 0 + + +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_aggregation_query_explain_in_transaction( + aggregation_query_client, nested_query, database_id +): + """ + When an aggregation query is run in a transaction, the transaction id should be sent with the request. + The result is the same as when it is run outside of a transaction. 
+ """ + from google.cloud.datastore.query_profile import ExplainMetrics + from google.cloud.datastore.query_profile import ExplainOptions + + with aggregation_query_client.transaction(): + agg_query = aggregation_query_client.aggregation_query( + nested_query, explain_options=ExplainOptions(analyze=True) + ) + agg_query.count() + agg_query.sum("appearances") + agg_query.avg("appearances") + iterator = agg_query.fetch() + # run full query + list(iterator) + # check for stats + stats = iterator.explain_metrics + assert isinstance(stats, ExplainMetrics) diff --git a/tests/system/test_query.py b/tests/system/test_query.py index b9574789..99dce2ec 100644 --- a/tests/system/test_query.py +++ b/tests/system/test_query.py @@ -61,8 +61,8 @@ def ancestor_key(query_client, in_emulator): clear_datastore.remove_all_entities(client=query_client) -def _make_ancestor_query(query_client, ancestor_key): - return query_client.query(kind="Character", ancestor=ancestor_key) +def _make_ancestor_query(query_client, ancestor_key, **kwargs): + return query_client.query(kind="Character", ancestor=ancestor_key, **kwargs) @pytest.fixture(scope="function") @@ -527,3 +527,123 @@ def test_query_add_complex_filters(ancestor_query, database_id): assert alive_count == 4 assert appearance_count == 4 assert stark_family_count == 5 + + +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_no_explain(query_client, ancestor_key, database_id): + """ + When explain_options is not set, iterator.explain_metrics should raise an exception + """ + from google.cloud.datastore.query_profile import QueryExplainError + + expected_error = "explain_options not set on query" + query = _make_ancestor_query(query_client, ancestor_key, explain_options=None) + iterator = query.fetch() + with pytest.raises(QueryExplainError) as excinfo: + iterator.explain_metrics + assert expected_error in str(excinfo.value) + # exhaust the iterator and try again + list(iterator) + with pytest.raises(QueryExplainError) as excinfo: + iterator.explain_metrics + assert expected_error in str(excinfo.value) + + +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_explain(query_client, ancestor_key, database_id): + """ + When explain_options(analyze=False) is set, iterator should contain explain_metrics field + with plan_summary but no execution_stats + """ + from google.cloud.datastore.query_profile import QueryExplainError + from google.cloud.datastore.query_profile import ExplainOptions + from google.cloud.datastore.query_profile import ExplainMetrics + from google.cloud.datastore.query_profile import PlanSummary + + query = _make_ancestor_query( + query_client, ancestor_key, explain_options=ExplainOptions(analyze=False) + ) + iterator = query.fetch() + # should have plan_summary but no execution_stats + stats = iterator.explain_metrics + assert isinstance(stats, ExplainMetrics) + assert isinstance(stats.plan_summary, PlanSummary) + assert len(stats.plan_summary.indexes_used) > 0 + assert stats.plan_summary.indexes_used[0]["properties"] == "(__name__ ASC)" + assert stats.plan_summary.indexes_used[0]["query_scope"] == "Collection group" + # execution_stats should not be present + with pytest.raises(QueryExplainError) as excinfo: + stats.execution_stats + expected_error = "execution_stats not available when explain_options.analyze=False." 
+ assert expected_error in str(excinfo.value) + # should have no results + assert list(iterator) == [] + + +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_explain_analyze(query_client, ancestor_key, database_id): + """ + When explain_options(analyze=True) is set, iterator should contain explain_metrics field + with plan_summary and execution_stats + + Should not be present until iterator is exhausted + """ + from google.cloud.datastore.query_profile import QueryExplainError + from google.cloud.datastore.query_profile import ExplainOptions + from google.cloud.datastore.query_profile import ExplainMetrics + from google.cloud.datastore.query_profile import ExecutionStats + from google.cloud.datastore.query_profile import PlanSummary + + expected_error = "explain_metrics not available until query is complete." + query = _make_ancestor_query( + query_client, ancestor_key, explain_options=ExplainOptions(analyze=True) + ) + iterator = query.fetch() + # explain_metrics isn't present until iterator is exhausted + with pytest.raises(QueryExplainError) as excinfo: + iterator.explain_metrics + assert expected_error in str(excinfo.value) + # exhaust the iterator + results = list(iterator) + num_results = len(results) + assert num_results > 0 + stats = iterator.explain_metrics + assert isinstance(stats, ExplainMetrics) + # verify plan_summary + assert isinstance(stats.plan_summary, PlanSummary) + assert len(stats.plan_summary.indexes_used) > 0 + assert stats.plan_summary.indexes_used[0]["properties"] == "(__name__ ASC)" + assert stats.plan_summary.indexes_used[0]["query_scope"] == "Collection group" + # verify execution_stats + assert isinstance(stats.execution_stats, ExecutionStats) + assert stats.execution_stats.results_returned == num_results + assert stats.execution_stats.read_operations == num_results + duration = stats.execution_stats.execution_duration.total_seconds() + assert duration > 0 + assert duration < 1 # we expect a number closer to 0.05 + assert isinstance(stats.execution_stats.debug_stats, dict) + assert "billing_details" in stats.execution_stats.debug_stats + assert "documents_scanned" in stats.execution_stats.debug_stats + assert "index_entries_scanned" in stats.execution_stats.debug_stats + assert len(stats.execution_stats.debug_stats) > 0 + + +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_explain_in_transaction(query_client, ancestor_key, database_id): + """ + Should be able to access explain metrics when called in a transaction + """ + from google.cloud.datastore.query_profile import ExplainMetrics + from google.cloud.datastore.query_profile import ExplainOptions + + query = _make_ancestor_query( + query_client, ancestor_key, explain_options=ExplainOptions(analyze=True) + ) + client = query._client + with client.transaction(): + # run full query + iterator = query.fetch() + list(iterator) + # check for stats + stats = iterator.explain_metrics + assert isinstance(stats, ExplainMetrics) diff --git a/tests/unit/test_aggregation.py b/tests/unit/test_aggregation.py index 8284b808..5a595220 100644 --- a/tests/unit/test_aggregation.py +++ b/tests/unit/test_aggregation.py @@ -278,10 +278,25 @@ def test_query_fetch_w_explicit_client_w_limit(client, database_id): assert iterator._limit == limit +@pytest.mark.parametrize("database_id", [None, "somedb"], indirect=True) +def test_aggregation_uses_nested_query_explain_options(client, database_id): + """ + If explain_options is set on 
the nested query but not the aggregation, + use the nested query's explain_options. + """ + expected_explain_options = mock.Mock() + query = _make_query(client, explain_options=expected_explain_options) + aggregation_query = _make_aggregation_query( + client=client, query=query, explain_options=None + ) + assert aggregation_query._explain_options is expected_explain_options + + def test_iterator_constructor_defaults(): - query = object() + query = mock.Mock() client = object() aggregation_query = AggregationQuery(client=client, query=query) + assert aggregation_query._explain_options == query._explain_options iterator = _make_aggregation_iterator(aggregation_query, client) assert not iterator._started @@ -292,12 +307,17 @@ def test_iterator_constructor_defaults(): assert iterator._more_results assert iterator._retry is None assert iterator._timeout is None + assert iterator._explain_metrics is None def test_iterator_constructor_explicit(): query = object() client = object() - aggregation_query = AggregationQuery(client=client, query=query) + explain_options = object() + aggregation_query = AggregationQuery( + client=client, query=query, explain_options=explain_options + ) + assert aggregation_query._explain_options is explain_options retry = mock.Mock() timeout = 100000 limit = 2 @@ -315,6 +335,7 @@ def test_iterator_constructor_explicit(): assert iterator._retry == retry assert iterator._timeout == timeout assert iterator._limit == limit + assert iterator._explain_metrics is None def test_iterator__build_protobuf_empty(): @@ -404,16 +425,13 @@ def test_iterator__process_query_results_finished_result(): def test_iterator__process_query_results_unexpected_result(): - from google.cloud.datastore_v1.types import query as query_pb2 from google.cloud.datastore.aggregation import AggregationResult iterator = _make_aggregation_iterator(None, None) aggregation_pbs = [AggregationResult(alias="total", value=1)] - more_results_enum = ( - query_pb2.QueryResultBatch.MoreResultsType.MORE_RESULTS_TYPE_UNSPECIFIED - ) + more_results_enum = 999 response_pb = _make_aggregation_query_response(aggregation_pbs, more_results_enum) with pytest.raises(ValueError): iterator._process_query_results(response_pb) @@ -451,6 +469,169 @@ def test_iterator__next_page_no_more(): ds_api.run_aggregation_query.assert_not_called() +@pytest.mark.parametrize("database_id", [None, "somedb"]) +@pytest.mark.parametrize("analyze", [True, False]) +def test_iterator_sends_explain_options_w_request(database_id, analyze): + """ + When query has explain_options set, all requests should include + the explain_options field. 
+ """ + from google.cloud.datastore.query_profile import ExplainOptions + + response_pb = _make_aggregation_query_response([], 0) + ds_api = _make_datastore_api_for_aggregation(response_pb) + client = _Client(None, datastore_api=ds_api) + explain_options = ExplainOptions(analyze=analyze) + query = _make_aggregation_query( + client, _make_query(client), explain_options=explain_options + ) + iterator = _make_aggregation_iterator(query, client) + iterator._next_page() + # ensure explain_options is set in request + assert ds_api.run_aggregation_query.call_count == 1 + found_explain_options = ds_api.run_aggregation_query.call_args[1]["request"][ + "explain_options" + ] + assert found_explain_options == explain_options._to_dict() + assert found_explain_options["analyze"] == analyze + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_iterator_explain_metrics(database_id): + """ + If explain_metrics is recieved from backend, it should be set on the iterator + """ + from google.cloud.datastore.query_profile import ExplainMetrics + from google.cloud.datastore_v1.types import query_profile as query_profile_pb2 + from google.protobuf import duration_pb2 + + expected_metrics = query_profile_pb2.ExplainMetrics( + plan_summary=query_profile_pb2.PlanSummary(), + execution_stats=query_profile_pb2.ExecutionStats( + results_returned=100, + execution_duration=duration_pb2.Duration(seconds=1), + read_operations=10, + debug_stats={}, + ), + ) + response_pb = _make_aggregation_query_response([], 0) + response_pb.explain_metrics = expected_metrics + ds_api = _make_datastore_api_for_aggregation(response_pb) + client = _Client(None, datastore_api=ds_api) + query = _make_aggregation_query(client=client, query=_make_query(client)) + iterator = _make_aggregation_iterator(query, client) + assert iterator._explain_metrics is None + iterator._next_page() + assert isinstance(iterator._explain_metrics, ExplainMetrics) + assert iterator._explain_metrics == ExplainMetrics._from_pb(expected_metrics) + assert iterator.explain_metrics == ExplainMetrics._from_pb(expected_metrics) + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_iterator_explain_metrics_no_explain(database_id): + """ + If query has no explain_options set, iterator.explain_metrics should raise + an exception. + """ + from google.cloud.datastore.query_profile import QueryExplainError + + ds_api = _make_datastore_api_for_aggregation() + client = _Client(None, datastore_api=ds_api) + query = _make_aggregation_query(client, _make_query(client), explain_options=None) + iterator = _make_aggregation_iterator(query, client) + assert iterator._explain_metrics is None + with pytest.raises(QueryExplainError) as exc: + iterator.explain_metrics + assert "explain_options not set on query" in str(exc.value) + # should not raise error if field is set + iterator._explain_metrics = object() + assert iterator.explain_metrics is iterator._explain_metrics + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_iterator_explain_metrics_no_analyze_make_call(database_id): + """ + If query.explain_options(analyze=False), accessing iterator.explain_metrics + should make a network call to get the data. 
+ """ + from google.cloud.datastore.query_profile import ExplainOptions + from google.cloud.datastore.query_profile import ExplainMetrics + from google.cloud.datastore_v1.types import query_profile as query_profile_pb2 + from google.protobuf import duration_pb2 + + response_pb = _make_aggregation_query_response([], 0) + expected_metrics = query_profile_pb2.ExplainMetrics( + plan_summary=query_profile_pb2.PlanSummary(), + execution_stats=query_profile_pb2.ExecutionStats( + results_returned=100, + execution_duration=duration_pb2.Duration(seconds=1), + read_operations=10, + debug_stats={}, + ), + ) + response_pb.explain_metrics = expected_metrics + ds_api = _make_datastore_api_for_aggregation(response_pb) + client = _Client(None, datastore_api=ds_api) + explain_options = ExplainOptions(analyze=False) + query = _make_aggregation_query( + client, _make_query(client), explain_options=explain_options + ) + iterator = _make_aggregation_iterator(query, client) + assert ds_api.run_aggregation_query.call_count == 0 + metrics = iterator.explain_metrics + # ensure explain_options is set in request + assert ds_api.run_aggregation_query.call_count == 1 + assert isinstance(metrics, ExplainMetrics) + assert metrics == ExplainMetrics._from_pb(expected_metrics) + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_iterator_explain_metrics_no_analyze_make_call_failed(database_id): + """ + If query.explain_options(analyze=False), accessing iterator.explain_metrics + should make a network call to get the data. + If the call does not result in explain_metrics data, it should raise a QueryExplainError. + """ + from google.cloud.datastore.query_profile import ExplainOptions + from google.cloud.datastore.query_profile import QueryExplainError + + # mocked response does not return explain_metrics + response_pb = _make_aggregation_query_response([], 0) + ds_api = _make_datastore_api_for_aggregation(response_pb) + client = _Client(None, datastore_api=ds_api) + explain_options = ExplainOptions(analyze=False) + query = _make_aggregation_query( + client, _make_query(client), explain_options=explain_options + ) + iterator = _make_aggregation_iterator(query, client) + assert ds_api.run_aggregation_query.call_count == 0 + with pytest.raises(QueryExplainError): + iterator.explain_metrics + assert ds_api.run_aggregation_query.call_count == 1 + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_iterator_explain_analyze_access_before_complete(database_id): + """ + If query.explain_options(analyze=True), accessing iterator.explain_metrics + before the query is complete should raise an exception. 
+ """ + from google.cloud.datastore.query_profile import ExplainOptions + from google.cloud.datastore.query_profile import QueryExplainError + + ds_api = _make_datastore_api_for_aggregation() + client = _Client(None, datastore_api=ds_api) + explain_options = ExplainOptions(analyze=True) + query = _make_aggregation_query( + client, _make_query(client), explain_options=explain_options + ) + iterator = _make_aggregation_iterator(query, client) + expected_error = "explain_metrics not available until query is complete" + with pytest.raises(QueryExplainError) as exc: + iterator.explain_metrics + assert expected_error in str(exc.value) + + def _next_page_helper(txn_id=None, retry=None, timeout=None, database_id=None): from google.api_core import page_iterator from google.cloud.datastore_v1.types import datastore as datastore_pb2 diff --git a/tests/unit/test_query.py b/tests/unit/test_query.py index fa7d63dc..75fa31fa 100644 --- a/tests/unit/test_query.py +++ b/tests/unit/test_query.py @@ -1072,6 +1072,159 @@ def test_iterator__next_page_w_skipped_lt_offset(skipped_cursor_1, database_id): assert ds_api.run_query.call_args_list == expected_calls +@pytest.mark.parametrize("database_id", [None, "somedb"]) +@pytest.mark.parametrize("analyze", [True, False]) +def test_iterator_sends_explain_options_w_request(database_id, analyze): + """ + When query has explain_options set, all requests should include + the explain_options field. + """ + from google.cloud.datastore.query_profile import ExplainOptions + + response_pb = _make_query_response([], b"", 0, 0) + ds_api = _make_datastore_api(response_pb) + client = _Client(None, datastore_api=ds_api) + explain_options = ExplainOptions(analyze=analyze) + query = Query(client, explain_options=explain_options) + iterator = _make_iterator(query, client) + iterator._next_page() + # ensure explain_options is set in request + assert ds_api.run_query.call_count == 1 + found_explain_options = ds_api.run_query.call_args[1]["request"]["explain_options"] + assert found_explain_options == explain_options._to_dict() + assert found_explain_options["analyze"] == analyze + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_iterator_explain_metrics(database_id): + """ + If explain_metrics is recieved from backend, it should be set on the iterator + """ + from google.cloud.datastore.query_profile import ExplainMetrics + from google.cloud.datastore_v1.types import query_profile as query_profile_pb2 + from google.protobuf import duration_pb2 + + expected_metrics = query_profile_pb2.ExplainMetrics( + plan_summary=query_profile_pb2.PlanSummary(), + execution_stats=query_profile_pb2.ExecutionStats( + results_returned=100, + execution_duration=duration_pb2.Duration(seconds=1), + read_operations=10, + debug_stats={}, + ), + ) + response_pb = _make_query_response([], b"", 0, 0) + response_pb.explain_metrics = expected_metrics + ds_api = _make_datastore_api(response_pb) + client = _Client(None, datastore_api=ds_api) + query = Query(client) + iterator = _make_iterator(query, client) + assert iterator._explain_metrics is None + iterator._next_page() + assert isinstance(iterator._explain_metrics, ExplainMetrics) + assert iterator._explain_metrics == ExplainMetrics._from_pb(expected_metrics) + assert iterator.explain_metrics == ExplainMetrics._from_pb(expected_metrics) + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_iterator_explain_metrics_no_explain(database_id): + """ + If query has no explain_options set, iterator.explain_metrics should raise 
+ an exception. + """ + from google.cloud.datastore.query_profile import QueryExplainError + + ds_api = _make_datastore_api() + client = _Client(None, datastore_api=ds_api) + query = Query(client, explain_options=None) + iterator = _make_iterator(query, client) + assert iterator._explain_metrics is None + with pytest.raises(QueryExplainError) as exc: + iterator.explain_metrics + assert "explain_options not set on query" in str(exc.value) + # should not raise error if field is set + expected_metrics = object() + iterator._explain_metrics = expected_metrics + assert iterator.explain_metrics is expected_metrics + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_iterator_explain_metrics_no_analyze_make_call(database_id): + """ + If query.explain_options(analyze=False), accessing iterator.explain_metrics + should make a network call to get the data. + """ + from google.cloud.datastore.query_profile import ExplainOptions + from google.cloud.datastore.query_profile import ExplainMetrics + from google.cloud.datastore_v1.types import query_profile as query_profile_pb2 + from google.protobuf import duration_pb2 + + response_pb = _make_query_response([], b"", 0, 0) + expected_metrics = query_profile_pb2.ExplainMetrics( + plan_summary=query_profile_pb2.PlanSummary(), + execution_stats=query_profile_pb2.ExecutionStats( + results_returned=100, + execution_duration=duration_pb2.Duration(seconds=1), + read_operations=10, + debug_stats={}, + ), + ) + response_pb.explain_metrics = expected_metrics + ds_api = _make_datastore_api(response_pb) + client = _Client(None, datastore_api=ds_api) + explain_options = ExplainOptions(analyze=False) + query = Query(client, explain_options=explain_options) + iterator = _make_iterator(query, client) + assert ds_api.run_query.call_count == 0 + metrics = iterator.explain_metrics + # ensure explain_options is set in request + assert ds_api.run_query.call_count == 1 + assert isinstance(metrics, ExplainMetrics) + assert metrics == ExplainMetrics._from_pb(expected_metrics) + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_iterator_explain_metrics_no_analyze_make_call_failed(database_id): + """ + If query.explain_options(analyze=False), accessing iterator.explain_metrics + should make a network call to get the data. + If the call does not result in explain_metrics data, it should raise a QueryExplainError. + """ + from google.cloud.datastore.query_profile import ExplainOptions + from google.cloud.datastore.query_profile import QueryExplainError + + # mocked response does not return explain_metrics + response_pb = _make_query_response([], b"", 0, 0) + ds_api = _make_datastore_api(response_pb) + client = _Client(None, datastore_api=ds_api) + explain_options = ExplainOptions(analyze=False) + query = Query(client, explain_options=explain_options) + iterator = _make_iterator(query, client) + assert ds_api.run_query.call_count == 0 + with pytest.raises(QueryExplainError): + iterator.explain_metrics + assert ds_api.run_query.call_count == 1 + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_iterator_explain_analyze_access_before_complete(database_id): + """ + If query.explain_options(analyze=True), accessing iterator.explain_metrics + before the query is complete should raise an exception. 
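+ (The backend only attaches explain_metrics to the final response.)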
+ """ + from google.cloud.datastore.query_profile import ExplainOptions + from google.cloud.datastore.query_profile import QueryExplainError + + ds_api = _make_datastore_api() + client = _Client(None, datastore_api=ds_api) + query = _make_query(client, explain_options=ExplainOptions(analyze=True)) + iterator = _make_iterator(query, client) + expected_error = "explain_metrics not available until query is complete" + with pytest.raises(QueryExplainError) as exc: + iterator.explain_metrics + assert expected_error in str(exc.value) + + def test__item_to_entity(): from google.cloud.datastore.query import _item_to_entity diff --git a/tests/unit/test_query_profile.py b/tests/unit/test_query_profile.py new file mode 100644 index 00000000..d8c8fdd8 --- /dev/null +++ b/tests/unit/test_query_profile.py @@ -0,0 +1,126 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + + +def test_explain_metrics__from_pb(): + """ + Test creating an instance of ExplainMetrics from a protobuf. + """ + from google.cloud.datastore.query_profile import ( + ExplainMetrics, + _ExplainAnalyzeMetrics, + QueryExplainError, + PlanSummary, + ) + from google.cloud.datastore_v1.types import query_profile as query_profile_pb2 + from google.protobuf import struct_pb2, duration_pb2 + + # test without execution_stats field + expected_metrics = query_profile_pb2.ExplainMetrics( + plan_summary=query_profile_pb2.PlanSummary( + indexes_used=struct_pb2.ListValue(values=[]) + ) + ) + metrics = ExplainMetrics._from_pb(expected_metrics) + assert isinstance(metrics, ExplainMetrics) + assert isinstance(metrics.plan_summary, PlanSummary) + assert metrics.plan_summary.indexes_used == [] + with pytest.raises(QueryExplainError) as exc: + metrics.execution_stats + assert "execution_stats not available when explain_options.analyze=False" in str( + exc.value + ) + # test with execution_stats field + expected_metrics.execution_stats = query_profile_pb2.ExecutionStats( + results_returned=1, + execution_duration=duration_pb2.Duration(seconds=2), + read_operations=3, + debug_stats=struct_pb2.Struct( + fields={"foo": struct_pb2.Value(string_value="bar")} + ), + ) + metrics = ExplainMetrics._from_pb(expected_metrics) + assert isinstance(metrics, ExplainMetrics) + assert isinstance(metrics, _ExplainAnalyzeMetrics) + assert metrics.execution_stats.results_returned == 1 + assert metrics.execution_stats.execution_duration.total_seconds() == 2 + assert metrics.execution_stats.read_operations == 3 + assert metrics.execution_stats.debug_stats == {"foo": "bar"} + + +def test_explain_metrics__from_pb_empty(): + """ + Test with empty ExplainMetrics protobuf. 
+ """ + from google.cloud.datastore.query_profile import ( + ExplainMetrics, + ExecutionStats, + _ExplainAnalyzeMetrics, + PlanSummary, + ) + from google.cloud.datastore_v1.types import query_profile as query_profile_pb2 + from google.protobuf import struct_pb2 + + expected_metrics = query_profile_pb2.ExplainMetrics( + plan_summary=query_profile_pb2.PlanSummary( + indexes_used=struct_pb2.ListValue(values=[]) + ), + execution_stats=query_profile_pb2.ExecutionStats(), + ) + metrics = ExplainMetrics._from_pb(expected_metrics) + assert isinstance(metrics, ExplainMetrics) + assert isinstance(metrics, _ExplainAnalyzeMetrics) + assert isinstance(metrics.plan_summary, PlanSummary) + assert isinstance(metrics.execution_stats, ExecutionStats) + assert metrics.plan_summary.indexes_used == [] + assert metrics.execution_stats.results_returned == 0 + assert metrics.execution_stats.execution_duration.total_seconds() == 0 + assert metrics.execution_stats.read_operations == 0 + assert metrics.execution_stats.debug_stats == {} + + +def test_explain_metrics_execution_stats(): + """ + Standard ExplainMetrics class should raise exception when execution_stats is accessed. + _ExplainAnalyzeMetrics should include the field + """ + from google.cloud.datastore.query_profile import ( + ExplainMetrics, + QueryExplainError, + _ExplainAnalyzeMetrics, + ) + + metrics = ExplainMetrics(plan_summary=object()) + with pytest.raises(QueryExplainError) as exc: + metrics.execution_stats + assert "execution_stats not available when explain_options.analyze=False" in str( + exc.value + ) + expected_stats = object() + metrics = _ExplainAnalyzeMetrics( + plan_summary=object(), _execution_stats=expected_stats + ) + assert metrics.execution_stats is expected_stats + + +def test_explain_options__to_dict(): + """ + Should be able to create a dict representation of ExplainOptions + """ + from google.cloud.datastore.query_profile import ExplainOptions + + assert ExplainOptions(analyze=True)._to_dict() == {"analyze": True} + assert ExplainOptions(analyze=False)._to_dict() == {"analyze": False} From 6ef337581c415ff6536bfb81d7370ebffdef68ab Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 7 Aug 2024 14:27:01 -0700 Subject: [PATCH 23/24] chore(python): fix docs build (#556) Source-Link: https://github.com/googleapis/synthtool/commit/bef813d194de29ddf3576eda60148b6b3dcc93d9 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:94bb690db96e6242b2567a4860a94d48fa48696d092e51b0884a1a2c0a79a407 Co-authored-by: Owl Bot Co-authored-by: Daniel Sanche --- .github/.OwlBot.lock.yaml | 3 ++- .kokoro/docker/docs/Dockerfile | 9 ++++----- .kokoro/publish-docs.sh | 20 ++++++++++---------- 3 files changed, 16 insertions(+), 16 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 001b1b1c..6d064ddb 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:52210e0e0559f5ea8c52be148b33504022e1faef4e95fbe4b32d68022af2fa7e + digest: sha256:94bb690db96e6242b2567a4860a94d48fa48696d092e51b0884a1a2c0a79a407 +# created: 2024-07-31T14:52:44.926548819Z diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index 5205308b..e5410e29 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -72,19 +72,18 @@ RUN tar -xvf Python-3.10.14.tgz RUN ./Python-3.10.14/configure --enable-optimizations RUN make altinstall -RUN python3.10 -m venv /venv -ENV PATH /venv/bin:$PATH +ENV PATH /usr/local/bin/python3.10:$PATH ###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3 /tmp/get-pip.py \ + && python3.10 /tmp/get-pip.py \ && rm /tmp/get-pip.py # Test pip -RUN python3 -m pip +RUN python3.10 -m pip # Install build requirements COPY requirements.txt /requirements.txt -RUN python3 -m pip install --require-hashes -r requirements.txt +RUN python3.10 -m pip install --require-hashes -r requirements.txt CMD ["python3.10"] diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh index 38f083f0..233205d5 100755 --- a/.kokoro/publish-docs.sh +++ b/.kokoro/publish-docs.sh @@ -21,18 +21,18 @@ export PYTHONUNBUFFERED=1 export PATH="${HOME}/.local/bin:${PATH}" # Install nox -python3 -m pip install --require-hashes -r .kokoro/requirements.txt -python3 -m nox --version +python3.10 -m pip install --require-hashes -r .kokoro/requirements.txt +python3.10 -m nox --version # build docs nox -s docs # create metadata -python3 -m docuploader create-metadata \ +python3.10 -m docuploader create-metadata \ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3 setup.py --version) \ + --version=$(python3.10 setup.py --version) \ --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3 setup.py --name) \ + --distribution-name=$(python3.10 setup.py --name) \ --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) @@ -40,18 +40,18 @@ python3 -m docuploader create-metadata \ cat docs.metadata # upload docs -python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" +python3.10 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" # docfx yaml files nox -s docfx # create metadata. 
-python3 -m docuploader create-metadata \
+python3.10 -m docuploader create-metadata \
   --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
-  --version=$(python3 setup.py --version) \
+  --version=$(python3.10 setup.py --version) \
   --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
-  --distribution-name=$(python3 setup.py --name) \
+  --distribution-name=$(python3.10 setup.py --name) \
   --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
   --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
   --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
@@ -59,4 +59,4 @@ python3 -m docuploader create-metadata \
 cat docs.metadata

 # upload docs
-python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}"
+python3.10 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}"

From 66c5958a19c7ecb74378b118ee89e61c60f217ae Mon Sep 17 00:00:00 2001
From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com>
Date: Wed, 7 Aug 2024 14:46:19 -0700
Subject: [PATCH 24/24] chore(main): release 2.20.0 (#531)

Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com>
Co-authored-by: Daniel Sanche
---
 .release-please-manifest.json                    |  2 +-
 CHANGELOG.md                                     | 16 ++++++++++++++++
 google/cloud/datastore/gapic_version.py          |  2 +-
 google/cloud/datastore/version.py                |  2 +-
 google/cloud/datastore_admin/gapic_version.py    |  2 +-
 google/cloud/datastore_admin_v1/gapic_version.py |  2 +-
 google/cloud/datastore_v1/gapic_version.py       |  2 +-
 7 files changed, 22 insertions(+), 6 deletions(-)

diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index b7f666a6..ba3e06a7 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "2.19.0"
+  ".": "2.20.0"
 }
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 52d6dfc7..925fe2e2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,22 @@

 [1]: https://pypi.org/project/google-cloud-datastore/#history

+## [2.20.0](https://github.com/googleapis/python-datastore/compare/v2.19.0...v2.20.0) (2024-08-07)
+
+
+### Features
+
+* Add new types ExplainOptions, ExplainMetrics, PlanSummary, ExecutionStats ([#521](https://github.com/googleapis/python-datastore/issues/521)) ([dfbee2d](https://github.com/googleapis/python-datastore/commit/dfbee2db800a3ca99e65a5d386ea907db1c46598))
+* Add new_transaction support ([#499](https://github.com/googleapis/python-datastore/issues/499)) ([43855dd](https://github.com/googleapis/python-datastore/commit/43855dd1762f51771bb1a3924c6a234641950fb6))
+* Implement query profiling ([#542](https://github.com/googleapis/python-datastore/issues/542)) ([1500f70](https://github.com/googleapis/python-datastore/commit/1500f7007f251256ce2923e1168439d40d41cc4d))
+* New PropertyMask field which allows partial commits, lookups, and query results ([7fd218b](https://github.com/googleapis/python-datastore/commit/7fd218b2afc0282d8fea21992e8d10c5eec72ac7))
+
+
+### Bug Fixes
+
+* Retry and timeout values do not propagate in requests during pagination ([#555](https://github.com/googleapis/python-datastore/issues/555)) ([5e773cb](https://github.com/googleapis/python-datastore/commit/5e773cb8c766303fef53965dd100b3c4c93b98be))
+* Using end_cursor instead of skipped_cursor in Iterator to fix a rare bug. ([#552](https://github.com/googleapis/python-datastore/issues/552)) ([4982f9a](https://github.com/googleapis/python-datastore/commit/4982f9a6cbbe2de449535295a363a2dd49538c86))
+
 ## [2.19.0](https://github.com/googleapis/python-datastore/compare/v2.18.0...v2.19.0) (2023-12-10)

diff --git a/google/cloud/datastore/gapic_version.py b/google/cloud/datastore/gapic_version.py
index 28762874..0dca6536 100644
--- a/google/cloud/datastore/gapic_version.py
+++ b/google/cloud/datastore/gapic_version.py
@@ -12,4 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-__version__ = "2.19.0" # {x-release-please-version}
+__version__ = "2.20.0" # {x-release-please-version}
diff --git a/google/cloud/datastore/version.py b/google/cloud/datastore/version.py
index 2605c08a..9fea4fec 100644
--- a/google/cloud/datastore/version.py
+++ b/google/cloud/datastore/version.py
@@ -12,4 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-__version__ = "2.19.0"
+__version__ = "2.20.0"
diff --git a/google/cloud/datastore_admin/gapic_version.py b/google/cloud/datastore_admin/gapic_version.py
index 0f1a446f..551f0d2e 100644
--- a/google/cloud/datastore_admin/gapic_version.py
+++ b/google/cloud/datastore_admin/gapic_version.py
@@ -13,4 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-__version__ = "2.19.0" # {x-release-please-version}
+__version__ = "2.20.0" # {x-release-please-version}
diff --git a/google/cloud/datastore_admin_v1/gapic_version.py b/google/cloud/datastore_admin_v1/gapic_version.py
index 8dc121fd..34a9e352 100644
--- a/google/cloud/datastore_admin_v1/gapic_version.py
+++ b/google/cloud/datastore_admin_v1/gapic_version.py
@@ -12,4 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-__version__ = "2.19.0" # {x-release-please-version}
+__version__ = "2.20.0" # {x-release-please-version}
diff --git a/google/cloud/datastore_v1/gapic_version.py b/google/cloud/datastore_v1/gapic_version.py
index 8dc121fd..34a9e352 100644
--- a/google/cloud/datastore_v1/gapic_version.py
+++ b/google/cloud/datastore_v1/gapic_version.py
@@ -12,4 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-__version__ = "2.19.0" # {x-release-please-version}
+__version__ = "2.20.0" # {x-release-please-version}
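
[Endnote, not part of the patch series: the 2.20.0 changelog above calls out
the pagination fix from #555, where retry and timeout values passed to
Query.fetch previously applied only to the first RunQuery call and were
dropped for follow-up page requests. A minimal sketch of the intended
behavior follows, assuming the public retry/timeout keywords on fetch; the
kind and values are placeholders.]

# Sketch only; illustrates the contract targeted by #555.
from google.api_core.retry import Retry
from google.cloud import datastore

client = datastore.Client()
query = client.query(kind="Task")  # placeholder kind

# As of 2.20.0, these settings are expected to propagate to every page
# request issued while iterating, not just the initial RunQuery call.
for entity in query.fetch(retry=Retry(deadline=60.0), timeout=30.0):
    print(entity.key)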